diff --git a/hbase-client/pom.xml b/hbase-client/pom.xml index 6ebe40c3618..a1324e1034a 100644 --- a/hbase-client/pom.xml +++ b/hbase-client/pom.xml @@ -132,8 +132,8 @@ commons-lang3 - commons-logging - commons-logging + org.slf4j + slf4j-api org.apache.hbase.thirdparty diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/AsyncMetaTableAccessor.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/AsyncMetaTableAccessor.java index d2af95508de..05e60d42c0c 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/AsyncMetaTableAccessor.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/AsyncMetaTableAccessor.java @@ -33,8 +33,6 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.stream.Collectors; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.MetaTableAccessor.CollectingVisitor; import org.apache.hadoop.hbase.MetaTableAccessor.QueryType; import org.apache.hadoop.hbase.MetaTableAccessor.Visitor; @@ -53,6 +51,8 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.Pair; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * The asynchronous meta table accessor. Used to read/write region and assignment information store @@ -62,7 +62,7 @@ import org.apache.yetus.audience.InterfaceAudience; @InterfaceAudience.Private public class AsyncMetaTableAccessor { - private static final Log LOG = LogFactory.getLog(AsyncMetaTableAccessor.class); + private static final Logger LOG = LoggerFactory.getLogger(AsyncMetaTableAccessor.class); /** The delimiter for meta columns for replicaIds > 0 */ diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionInfo.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionInfo.java index 930a0a3a671..e2982bd2f51 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionInfo.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionInfo.java @@ -25,8 +25,6 @@ import java.util.Arrays; import java.util.List; import java.util.stream.Collectors; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.KeyValue.KVComparator; import org.apache.hadoop.hbase.client.RegionInfo; @@ -37,7 +35,8 @@ import org.apache.hadoop.hbase.master.RegionState; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.io.DataInputBuffer; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos; @@ -76,7 +75,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos; @Deprecated @InterfaceAudience.Public public class HRegionInfo implements RegionInfo, Comparable { - private static final Log LOG = LogFactory.getLog(HRegionInfo.class); + private static final Logger LOG = LoggerFactory.getLogger(HRegionInfo.class); /** * The new format for a region name contains its encodedName at the end. 
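The per-class change applied throughout this patch is mechanical: drop the commons-logging Log/LogFactory pair and declare an org.slf4j Logger instead. A minimal sketch of the before/after declaration (the class name below is illustrative, not taken from the patch; the patch itself mostly just swaps the logger type):

```java
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class SomeClientClass { // hypothetical class, for illustration only
  // before: private static final Log LOG = LogFactory.getLog(SomeClientClass.class);
  private static final Logger LOG = LoggerFactory.getLogger(SomeClientClass.class);

  void example(String region, Exception e) {
    // SLF4J also supports parameterized messages; a trailing Throwable is
    // logged with its stack trace. The patch does not rewrite call sites,
    // only the logger declarations and imports.
    LOG.debug("located region {}", region);
    LOG.error("lookup failed for {}", region, e);
  }
}
```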
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java index 077494509c3..4f14192ceb6 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java @@ -34,8 +34,6 @@ import java.util.TreeMap; import java.util.regex.Matcher; import java.util.regex.Pattern; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Cell.DataType; import org.apache.hadoop.hbase.client.Connection; @@ -71,7 +69,8 @@ import org.apache.hadoop.hbase.util.ExceptionUtil; import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.hbase.util.PairOfSameType; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; @@ -142,8 +141,8 @@ public class MetaTableAccessor { * separated by "," */ - private static final Log LOG = LogFactory.getLog(MetaTableAccessor.class); - private static final Log METALOG = LogFactory.getLog("org.apache.hadoop.hbase.META"); + private static final Logger LOG = LoggerFactory.getLogger(MetaTableAccessor.class); + private static final Logger METALOG = LoggerFactory.getLogger("org.apache.hadoop.hbase.META"); // Save its daughter/parent region(s) when split/merge private static final byte[] daughterNameCq = Bytes.toBytes("_DAUGHTER_"); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncBatchRpcRetryingCaller.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncBatchRpcRetryingCaller.java index 52eb821fd70..72494355f95 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncBatchRpcRetryingCaller.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncBatchRpcRetryingCaller.java @@ -44,14 +44,14 @@ import java.util.function.Supplier; import java.util.stream.Collectors; import java.util.stream.Stream; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.CellScannable; import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.HRegionLocation; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.TableName; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.client.MultiResponse.RegionResult; import org.apache.hadoop.hbase.client.RetriesExhaustedException.ThrowableWithExtraContext; import org.apache.hadoop.hbase.ipc.HBaseRpcController; @@ -77,7 +77,7 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; @InterfaceAudience.Private class AsyncBatchRpcRetryingCaller { - private static final Log LOG = LogFactory.getLog(AsyncBatchRpcRetryingCaller.class); + private static final Logger LOG = LoggerFactory.getLogger(AsyncBatchRpcRetryingCaller.class); private final HashedWheelTimer retryTimer; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncConnectionImpl.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncConnectionImpl.java index f9f96593ca9..c1d84ece3d6 100644 --- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncConnectionImpl.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncConnectionImpl.java @@ -34,13 +34,13 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; import org.apache.commons.io.IOUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.MasterNotRunningException; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.TableName; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.ipc.HBaseRpcController; import org.apache.hadoop.hbase.ipc.RpcClient; import org.apache.hadoop.hbase.ipc.RpcClientFactory; @@ -61,7 +61,7 @@ import org.apache.hadoop.hbase.util.Threads; @InterfaceAudience.Private class AsyncConnectionImpl implements AsyncConnection { - private static final Log LOG = LogFactory.getLog(AsyncConnectionImpl.class); + private static final Logger LOG = LoggerFactory.getLogger(AsyncConnectionImpl.class); @VisibleForTesting static final HashedWheelTimer RETRY_TIMER = new HashedWheelTimer( diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncMetaRegionLocator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncMetaRegionLocator.java index bb4ea6b8f24..06b5b57fc99 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncMetaRegionLocator.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncMetaRegionLocator.java @@ -21,10 +21,10 @@ import static org.apache.hadoop.hbase.client.AsyncRegionLocator.*; import java.util.concurrent.CompletableFuture; import java.util.concurrent.atomic.AtomicReference; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HRegionLocation; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * The asynchronous locator for meta region. 
@@ -32,7 +32,7 @@ import org.apache.yetus.audience.InterfaceAudience; @InterfaceAudience.Private class AsyncMetaRegionLocator { - private static final Log LOG = LogFactory.getLog(AsyncMetaRegionLocator.class); + private static final Logger LOG = LoggerFactory.getLogger(AsyncMetaRegionLocator.class); private final AsyncRegistry registry; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncNonMetaRegionLocator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncNonMetaRegionLocator.java index 2adafb66e2c..45d3f528ab7 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncNonMetaRegionLocator.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncNonMetaRegionLocator.java @@ -42,8 +42,6 @@ import java.util.concurrent.ConcurrentMap; import java.util.concurrent.ConcurrentNavigableMap; import java.util.concurrent.ConcurrentSkipListMap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.HRegionLocation; import org.apache.hadoop.hbase.MetaTableAccessor; @@ -52,6 +50,8 @@ import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.TableNotFoundException; import org.apache.hadoop.hbase.util.Bytes; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * The asynchronous locator for regions other than meta. @@ -59,7 +59,7 @@ import org.apache.yetus.audience.InterfaceAudience; @InterfaceAudience.Private class AsyncNonMetaRegionLocator { - private static final Log LOG = LogFactory.getLog(AsyncNonMetaRegionLocator.class); + private static final Logger LOG = LoggerFactory.getLogger(AsyncNonMetaRegionLocator.class); static final String MAX_CONCURRENT_LOCATE_REQUEST_PER_TABLE = "hbase.client.meta.max.concurrent.locate.per.table"; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncProcess.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncProcess.java index f6e7739047a..fc511f56cbc 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncProcess.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncProcess.java @@ -34,8 +34,6 @@ import java.util.Objects; import java.util.concurrent.atomic.AtomicLong; import java.util.function.Consumer; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HRegionLocation; @@ -44,6 +42,8 @@ import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.TableName; import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceStability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.client.AsyncProcessTask.SubmittedRows; import org.apache.hadoop.hbase.client.RequestController.ReturnCode; import org.apache.hadoop.hbase.ipc.RpcControllerFactory; @@ -87,7 +87,7 @@ import org.apache.hadoop.hbase.util.Bytes; @InterfaceAudience.Private @InterfaceStability.Evolving class AsyncProcess { - private static final Log LOG = LogFactory.getLog(AsyncProcess.class); + private static final Logger LOG = LoggerFactory.getLogger(AsyncProcess.class); private static final AtomicLong COUNTER = new AtomicLong(); public static final String PRIMARY_CALL_TIMEOUT_KEY = "hbase.client.primaryCallTimeout.multiget"; diff 
--git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncRegionLocator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncRegionLocator.java index 6a2870859b9..14638949d43 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncRegionLocator.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncRegionLocator.java @@ -30,11 +30,11 @@ import java.util.function.Consumer; import java.util.function.Function; import java.util.function.Supplier; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HRegionLocation; import org.apache.hadoop.hbase.TableName; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.exceptions.RegionMovedException; import org.apache.hadoop.hbase.exceptions.TimeoutIOException; import org.apache.hadoop.hbase.util.Bytes; @@ -45,7 +45,7 @@ import org.apache.hadoop.hbase.util.Bytes; @InterfaceAudience.Private class AsyncRegionLocator { - private static final Log LOG = LogFactory.getLog(AsyncRegionLocator.class); + private static final Logger LOG = LoggerFactory.getLogger(AsyncRegionLocator.class); private final HashedWheelTimer retryTimer; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.java index 91225a76100..ed1bdb3e476 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.java @@ -37,8 +37,6 @@ import java.util.concurrent.RejectedExecutionException; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.CallQueueTooBigException; import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.HConstants; @@ -50,6 +48,8 @@ import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.trace.TraceUtil; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.client.backoff.ServerStatistics; import org.apache.hadoop.hbase.client.coprocessor.Batch; import org.apache.hadoop.hbase.exceptions.ClientExceptionsUtil; @@ -71,7 +71,7 @@ import org.apache.htrace.core.Tracer; @InterfaceAudience.Private class AsyncRequestFutureImpl implements AsyncRequestFuture { - private static final Log LOG = LogFactory.getLog(AsyncRequestFutureImpl.class); + private static final Logger LOG = LoggerFactory.getLogger(AsyncRequestFutureImpl.class); private RetryingTimeTracker tracker; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncRpcRetryingCaller.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncRpcRetryingCaller.java index 70cf4954d81..15045af8d35 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncRpcRetryingCaller.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncRpcRetryingCaller.java @@ -32,17 +32,17 @@ import java.util.concurrent.TimeUnit; import java.util.function.Consumer; import java.util.function.Supplier; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import 
org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.ipc.HBaseRpcController; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; @InterfaceAudience.Private public abstract class AsyncRpcRetryingCaller { - private static final Log LOG = LogFactory.getLog(AsyncRpcRetryingCaller.class); + private static final Logger LOG = LoggerFactory.getLogger(AsyncRpcRetryingCaller.class); private final HashedWheelTimer retryTimer; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncScanSingleRegionRpcRetryingCaller.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncScanSingleRegionRpcRetryingCaller.java index 51c243abfca..0dcab3848a2 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncScanSingleRegionRpcRetryingCaller.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncScanSingleRegionRpcRetryingCaller.java @@ -35,8 +35,6 @@ import java.util.Optional; import java.util.concurrent.CompletableFuture; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.HRegionLocation; import org.apache.hadoop.hbase.NotServingRegionException; @@ -49,7 +47,8 @@ import org.apache.hadoop.hbase.ipc.HBaseRpcController; import org.apache.hadoop.hbase.regionserver.RegionServerStoppedException; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; import org.apache.hadoop.hbase.shaded.io.netty.util.HashedWheelTimer; import org.apache.hadoop.hbase.shaded.io.netty.util.Timeout; @@ -70,7 +69,8 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRespon @InterfaceAudience.Private class AsyncScanSingleRegionRpcRetryingCaller { - private static final Log LOG = LogFactory.getLog(AsyncScanSingleRegionRpcRetryingCaller.class); + private static final Logger LOG = + LoggerFactory.getLogger(AsyncScanSingleRegionRpcRetryingCaller.class); private final HashedWheelTimer retryTimer; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncTableResultScanner.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncTableResultScanner.java index fe9645a5197..c91c2174fd6 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncTableResultScanner.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncTableResultScanner.java @@ -24,9 +24,9 @@ import java.io.InterruptedIOException; import java.util.ArrayDeque; import java.util.Queue; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.client.metrics.ScanMetrics; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.common.base.Throwables; @@ -39,7 +39,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.base.Throwables; @InterfaceAudience.Private class AsyncTableResultScanner implements ResultScanner, AdvancedScanResultConsumer { - private static final Log LOG = 
LogFactory.getLog(AsyncTableResultScanner.class); + private static final Logger LOG = LoggerFactory.getLogger(AsyncTableResultScanner.class); private final AsyncTable rawTable; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/BatchErrors.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/BatchErrors.java index 95b3484aa6b..d3cdc74fdf6 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/BatchErrors.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/BatchErrors.java @@ -19,15 +19,15 @@ package org.apache.hadoop.hbase.client; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.ServerName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.util.ArrayList; import java.util.List; class BatchErrors { - private static final Log LOG = LogFactory.getLog(BatchErrors.class); + private static final Logger LOG = LoggerFactory.getLogger(BatchErrors.class); final List throwables = new ArrayList<>(); final List actions = new ArrayList<>(); final List addresses = new ArrayList<>(); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/BufferedMutatorImpl.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/BufferedMutatorImpl.java index b91038d0073..d2013723f15 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/BufferedMutatorImpl.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/BufferedMutatorImpl.java @@ -15,13 +15,6 @@ */ package org.apache.hadoop.hbase.client; -import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.TableName; -import org.apache.yetus.audience.InterfaceAudience; -import org.apache.yetus.audience.InterfaceStability; import java.io.IOException; import java.io.InterruptedIOException; import java.util.Collections; @@ -33,8 +26,16 @@ import java.util.concurrent.ExecutorService; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; -import org.apache.hadoop.hbase.HConstants; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.ipc.RpcControllerFactory; +import org.apache.yetus.audience.InterfaceAudience; +import org.apache.yetus.audience.InterfaceStability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** *
@@ -59,7 +60,7 @@ import org.apache.hadoop.hbase.ipc.RpcControllerFactory; @InterfaceStability.Evolving public class BufferedMutatorImpl implements BufferedMutator { - private static final Log LOG = LogFactory.getLog(BufferedMutatorImpl.class); + private static final Logger LOG = LoggerFactory.getLogger(BufferedMutatorImpl.class); private final ExceptionListener listener; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientIdGenerator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientIdGenerator.java index d48462f974d..d4b4b4a9020 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientIdGenerator.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientIdGenerator.java @@ -21,9 +21,9 @@ package org.apache.hadoop.hbase.client; import java.io.IOException; import java.lang.management.ManagementFactory; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.util.Addressing; import org.apache.hadoop.hbase.util.Bytes; @@ -33,7 +33,7 @@ import org.apache.hadoop.hbase.util.Bytes; */ @InterfaceAudience.Private final class ClientIdGenerator { - private static final Log LOG = LogFactory.getLog(ClientIdGenerator.class); + private static final Logger LOG = LoggerFactory.getLogger(ClientIdGenerator.class); private ClientIdGenerator() {} diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientScanner.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientScanner.java index 738f095c012..a0f7ad8cfa4 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientScanner.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientScanner.java @@ -30,8 +30,6 @@ import java.util.Queue; import java.util.concurrent.ExecutorService; import org.apache.commons.lang3.mutable.MutableBoolean; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.HBaseConfiguration; @@ -41,6 +39,8 @@ import org.apache.hadoop.hbase.NotServingRegionException; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.UnknownScannerException; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.client.ScannerCallable.MoreResults; import org.apache.hadoop.hbase.exceptions.OutOfOrderScannerNextException; import org.apache.hadoop.hbase.exceptions.ScannerResetException; @@ -56,7 +56,7 @@ import org.apache.hadoop.hbase.util.Bytes; @InterfaceAudience.Private public abstract class ClientScanner extends AbstractClientScanner { - private static final Log LOG = LogFactory.getLog(ClientScanner.class); + private static final Logger LOG = LoggerFactory.getLogger(ClientScanner.class); protected final Scan scan; protected boolean closed = false; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClusterStatusListener.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClusterStatusListener.java index 7f20436705b..5ff1b67c611 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClusterStatusListener.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClusterStatusListener.java @@ -31,8 +31,6 @@ import 
java.net.UnknownHostException; import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.ClusterStatus; import org.apache.hadoop.hbase.HBaseInterfaceAudience; @@ -42,7 +40,8 @@ import org.apache.hadoop.hbase.util.Addressing; import org.apache.hadoop.hbase.util.ExceptionUtil; import org.apache.hadoop.hbase.util.Threads; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.io.netty.bootstrap.Bootstrap; import org.apache.hadoop.hbase.shaded.io.netty.buffer.ByteBufInputStream; import org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelHandlerContext; @@ -63,7 +62,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos; */ @InterfaceAudience.Private class ClusterStatusListener implements Closeable { - private static final Log LOG = LogFactory.getLog(ClusterStatusListener.class); + private static final Logger LOG = LoggerFactory.getLogger(ClusterStatusListener.class); private final List deadServers = new ArrayList<>(); protected final DeadServerHandler deadServerHandler; private final Listener listener; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java index 5d71d9cb589..1f34dba5b40 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java @@ -49,8 +49,6 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.locks.ReentrantLock; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.CallQueueTooBigException; import org.apache.hadoop.hbase.DoNotRetryIOException; @@ -71,6 +69,7 @@ import org.apache.hadoop.hbase.exceptions.RegionMovedException; import org.apache.hadoop.hbase.ipc.RpcClient; import org.apache.hadoop.hbase.ipc.RpcClientFactory; import org.apache.hadoop.hbase.ipc.RpcControllerFactory; +import org.apache.hadoop.hbase.log.HBaseMarkers; import org.apache.hadoop.hbase.regionserver.RegionServerStoppedException; import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.util.Bytes; @@ -82,7 +81,8 @@ import org.apache.hadoop.hbase.util.Threads; import org.apache.hadoop.ipc.RemoteException; import org.apache.yetus.audience.InterfaceAudience; import org.apache.zookeeper.KeeperException; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel; import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController; @@ -138,7 +138,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.Updat @InterfaceAudience.Private class ConnectionImplementation implements ClusterConnection, Closeable { public static final String RETRIES_BY_SERVER_KEY = "hbase.client.retries.by.server"; - private static final Log LOG = LogFactory.getLog(ConnectionImplementation.class); + private static final Logger LOG = LoggerFactory.getLogger(ConnectionImplementation.class); private static final 
String RESOLVE_HOSTNAME_ON_FAIL_KEY = "hbase.resolve.hostnames.on.failure"; @@ -1882,9 +1882,9 @@ class ConnectionImplementation implements ClusterConnection, Closeable { @Override public void abort(final String msg, Throwable t) { if (t != null) { - LOG.fatal(msg, t); + LOG.error(HBaseMarkers.FATAL, msg, t); } else { - LOG.fatal(msg); + LOG.error(HBaseMarkers.FATAL, msg); } this.aborted = true; close(); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionUtils.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionUtils.java index e27bf718502..f3489e13536 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionUtils.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionUtils.java @@ -32,8 +32,6 @@ import java.util.concurrent.ExecutorService; import java.util.concurrent.ThreadLocalRandom; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; @@ -51,7 +49,8 @@ import org.apache.hadoop.hbase.util.ReflectionUtils; import org.apache.hadoop.ipc.RemoteException; import org.apache.hadoop.net.DNS; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; @@ -67,7 +66,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MasterServ @InterfaceAudience.Private public final class ConnectionUtils { - private static final Log LOG = LogFactory.getLog(ConnectionUtils.class); + private static final Logger LOG = LoggerFactory.getLogger(ConnectionUtils.class); private ConnectionUtils() { } @@ -110,7 +109,7 @@ public final class ConnectionUtils { * @param log Used to log what we set in here. */ public static void setServerSideHConnectionRetriesConfig(final Configuration c, final String sn, - final Log log) { + final Logger log) { // TODO: Fix this. Not all connections from server side should have 10 times the retries. 
int hcRetries = c.getInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, HConstants.DEFAULT_HBASE_CLIENT_RETRIES_NUMBER); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/DelayingRunner.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/DelayingRunner.java index 05f2511731a..8ab5d850d2d 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/DelayingRunner.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/DelayingRunner.java @@ -17,9 +17,9 @@ */ package org.apache.hadoop.hbase.client; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import java.util.List; @@ -40,7 +40,7 @@ import java.util.Map; */ @InterfaceAudience.Private public class DelayingRunner implements Runnable { - private static final Log LOG = LogFactory.getLog(DelayingRunner.class); + private static final Logger LOG = LoggerFactory.getLogger(DelayingRunner.class); private final Object sleepLock = new Object(); private boolean triggerWake = false; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/FlushRegionCallable.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/FlushRegionCallable.java index 442bf1d270f..bb265a43f62 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/FlushRegionCallable.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/FlushRegionCallable.java @@ -20,15 +20,14 @@ package org.apache.hadoop.hbase.client; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.ipc.HBaseRpcController; import org.apache.hadoop.hbase.ipc.RpcControllerFactory; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter; import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionRequest; import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionResponse; @@ -38,7 +37,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegion */ @InterfaceAudience.Private public class FlushRegionCallable extends RegionAdminServiceCallable { - private static final Log LOG = LogFactory.getLog(FlushRegionCallable.class); + private static final Logger LOG = LoggerFactory.getLogger(FlushRegionCallable.class); private final byte[] regionName; private final boolean writeFlushWalMarker; private boolean reload; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Get.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Get.java index 059a5fd1eaa..80b8a221d38 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Get.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Get.java @@ -30,10 +30,10 @@ import java.util.Set; import java.util.TreeMap; import java.util.TreeSet; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HConstants; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import 
org.apache.hadoop.hbase.filter.Filter; import org.apache.hadoop.hbase.io.TimeRange; import org.apache.hadoop.hbase.security.access.Permission; @@ -66,7 +66,7 @@ import org.apache.hadoop.hbase.util.Bytes; @InterfaceAudience.Public public class Get extends Query implements Row, Comparable { - private static final Log LOG = LogFactory.getLog(Get.class); + private static final Logger LOG = LoggerFactory.getLogger(Get.class); private byte [] row = null; private int maxVersions = 1; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java index 207d28b912b..600ee696145 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java @@ -46,8 +46,6 @@ import java.util.regex.Pattern; import java.util.stream.Collectors; import java.util.stream.Stream; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Abortable; import org.apache.hadoop.hbase.CacheEvictionStats; @@ -103,6 +101,8 @@ import org.apache.hadoop.ipc.RemoteException; import org.apache.hadoop.util.StringUtils; import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceStability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; @@ -223,7 +223,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos; @InterfaceAudience.Private @InterfaceStability.Evolving public class HBaseAdmin implements Admin { - private static final Log LOG = LogFactory.getLog(HBaseAdmin.class); + private static final Logger LOG = LoggerFactory.getLogger(HBaseAdmin.class); private ClusterConnection connection; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java index 939398fc2e8..8d15140da89 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java @@ -26,8 +26,6 @@ import com.google.protobuf.Message; import com.google.protobuf.Service; import com.google.protobuf.ServiceException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CompareOperator; @@ -38,6 +36,8 @@ import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.TableName; import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceStability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.client.coprocessor.Batch; import org.apache.hadoop.hbase.client.coprocessor.Batch.Callback; import org.apache.hadoop.hbase.filter.BinaryComparator; @@ -103,7 +103,7 @@ import static org.apache.hadoop.hbase.client.ConnectionUtils.checkHasFamilies; @InterfaceAudience.Private @InterfaceStability.Stable public class HTable implements Table { - private static final Log LOG = LogFactory.getLog(HTable.class); + private static final Logger LOG = LoggerFactory.getLogger(HTable.class); private static 
final Consistency DEFAULT_CONSISTENCY = Consistency.STRONG; private final ClusterConnection connection; private final TableName tableName; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableMultiplexer.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableMultiplexer.java index 77d4fb2923f..a33fd1d9b62 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableMultiplexer.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableMultiplexer.java @@ -35,8 +35,6 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HConstants; @@ -46,7 +44,8 @@ import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.ipc.RpcControllerFactory; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder; @@ -66,7 +65,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFa */ @InterfaceAudience.Public public class HTableMultiplexer { - private static final Log LOG = LogFactory.getLog(HTableMultiplexer.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(HTableMultiplexer.class.getName()); public static final String TABLE_MULTIPLEXER_FLUSH_PERIOD_MS = "hbase.tablemultiplexer.flush.period.ms"; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetaCache.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetaCache.java index 6dc46d39d06..a3677890d48 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetaCache.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetaCache.java @@ -27,8 +27,6 @@ import java.util.concurrent.ConcurrentMap; import java.util.concurrent.ConcurrentNavigableMap; import java.util.concurrent.CopyOnWriteArraySet; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HRegionLocation; import org.apache.hadoop.hbase.RegionLocations; @@ -37,6 +35,8 @@ import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.types.CopyOnWriteArrayMap; import org.apache.hadoop.hbase.util.Bytes; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A cache implementation for region locations from meta. @@ -44,7 +44,7 @@ import org.apache.yetus.audience.InterfaceAudience; @InterfaceAudience.Private public class MetaCache { - private static final Log LOG = LogFactory.getLog(MetaCache.class); + private static final Logger LOG = LoggerFactory.getLogger(MetaCache.class); /** * Map of table to table {@link HRegionLocation}s. 
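The HTableMultiplexer hunk above keeps the old getLog(Class.getName()) shape while other files pass the Class object directly. With SLF4J the String and Class overloads resolve to the same logger name, so the two forms are interchangeable; a small illustrative check (not part of the patch):

```java
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LoggerNameCheck { // illustrative class, not part of the patch
  public static void main(String[] args) {
    Logger byName = LoggerFactory.getLogger(LoggerNameCheck.class.getName());
    Logger byClass = LoggerFactory.getLogger(LoggerNameCheck.class);
    // Both loggers carry the fully-qualified class name, so the
    // getLogger(Class.getName()) form kept in HTableMultiplexer is
    // equivalent to the plain getLogger(Class) form used elsewhere.
    System.out.println(byName.getName().equals(byClass.getName())); // prints "true"
  }
}
```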
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/PreemptiveFastFailInterceptor.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/PreemptiveFastFailInterceptor.java index edcc8d6cf73..e38d8faca0d 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/PreemptiveFastFailInterceptor.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/PreemptiveFastFailInterceptor.java @@ -27,12 +27,12 @@ import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import org.apache.commons.lang3.mutable.MutableBoolean; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.ServerName; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.exceptions.ClientExceptionsUtil; import org.apache.hadoop.hbase.exceptions.PreemptiveFastFailException; import org.apache.hadoop.hbase.ipc.CallTimeoutException; @@ -66,8 +66,8 @@ import org.apache.hadoop.ipc.RemoteException; @InterfaceAudience.Private class PreemptiveFastFailInterceptor extends RetryingCallerInterceptor { - private static final Log LOG = LogFactory - .getLog(PreemptiveFastFailInterceptor.class); + private static final Logger LOG = LoggerFactory + .getLogger(PreemptiveFastFailInterceptor.class); // amount of time to wait before we consider a server to be in fast fail // mode diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.java index 5e9356a1fe3..bb427b1b494 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.java @@ -45,8 +45,6 @@ import java.util.stream.Collectors; import java.util.stream.Stream; import org.apache.commons.io.IOUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.AsyncMetaTableAccessor; import org.apache.hadoop.hbase.ClusterStatus; @@ -87,7 +85,8 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.ForeignExceptionUtil; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback; import org.apache.hadoop.hbase.shaded.io.netty.util.HashedWheelTimer; import org.apache.hadoop.hbase.shaded.io.netty.util.Timeout; @@ -270,7 +269,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos; class RawAsyncHBaseAdmin implements AsyncAdmin { public static final String FLUSH_TABLE_PROCEDURE_SIGNATURE = "flush-table-proc"; - private static final Log LOG = LogFactory.getLog(AsyncHBaseAdmin.class); + private static final Logger LOG = LoggerFactory.getLogger(AsyncHBaseAdmin.class); private final AsyncConnectionImpl connection; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionCoprocessorRpcChannel.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionCoprocessorRpcChannel.java index ee954379fbc..448302c854d 100644 --- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionCoprocessorRpcChannel.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionCoprocessorRpcChannel.java @@ -19,11 +19,11 @@ package org.apache.hadoop.hbase.client; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.TableName; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils; import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest; import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse; @@ -43,7 +43,7 @@ import com.google.protobuf.RpcController; */ @InterfaceAudience.Private class RegionCoprocessorRpcChannel extends SyncCoprocessorRpcChannel { - private static final Log LOG = LogFactory.getLog(RegionCoprocessorRpcChannel.class); + private static final Logger LOG = LoggerFactory.getLogger(RegionCoprocessorRpcChannel.class); private final TableName table; private final byte [] row; private final ClusterConnection conn; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionInfoBuilder.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionInfoBuilder.java index e17e307205c..223b6fd3da2 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionInfoBuilder.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionInfoBuilder.java @@ -21,16 +21,16 @@ package org.apache.hadoop.hbase.client; import java.util.Arrays; import org.apache.commons.lang3.ArrayUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.util.Bytes; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @InterfaceAudience.Private public class RegionInfoBuilder { - private static final Log LOG = LogFactory.getLog(RegionInfoBuilder.class); + private static final Logger LOG = LoggerFactory.getLogger(RegionInfoBuilder.class); /** A non-capture group so that this can be embedded. 
*/ public static final String ENCODED_REGION_NAME_REGEX = "(?:[a-f0-9]+)"; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ResultBoundedCompletionService.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ResultBoundedCompletionService.java index b05ad64146c..70d32d51408 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ResultBoundedCompletionService.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ResultBoundedCompletionService.java @@ -26,10 +26,10 @@ import java.util.concurrent.RunnableFuture; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.trace.TraceUtil; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; /** @@ -47,7 +47,7 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; */ @InterfaceAudience.Private public class ResultBoundedCompletionService { - private static final Log LOG = LogFactory.getLog(ResultBoundedCompletionService.class); + private static final Logger LOG = LoggerFactory.getLogger(ResultBoundedCompletionService.class); private final RpcRetryingCallerFactory retryingCallerFactory; private final Executor executor; private final QueueingFuture[] tasks; // all the tasks diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetryingCallerInterceptorFactory.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetryingCallerInterceptorFactory.java index b5287a785eb..838e8fc695f 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetryingCallerInterceptorFactory.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetryingCallerInterceptorFactory.java @@ -20,11 +20,11 @@ package org.apache.hadoop.hbase.client; import java.lang.reflect.Constructor; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HConstants; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Factory implementation to provide the {@link ConnectionImplementation} with @@ -35,8 +35,8 @@ import org.apache.yetus.audience.InterfaceAudience; @InterfaceAudience.Private class RetryingCallerInterceptorFactory { - private static final Log LOG = LogFactory - .getLog(RetryingCallerInterceptorFactory.class); + private static final Logger LOG = LoggerFactory + .getLogger(RetryingCallerInterceptorFactory.class); private Configuration conf; private final boolean failFast; public static final RetryingCallerInterceptor NO_OP_INTERCEPTOR = diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerFactory.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerFactory.java index d03fe9fdbc9..e7a3d180104 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerFactory.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerFactory.java @@ -17,12 +17,12 @@ */ package org.apache.hadoop.hbase.client; -import org.apache.yetus.audience.InterfaceAudience; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HConstants; 
import org.apache.hadoop.hbase.util.ReflectionUtils; +import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Factory to create an {@link RpcRetryingCaller} @@ -32,7 +32,7 @@ public class RpcRetryingCallerFactory { /** Configuration key for a custom {@link RpcRetryingCaller} */ public static final String CUSTOM_CALLER_CONF_KEY = "hbase.rpc.callerfactory.class"; - private static final Log LOG = LogFactory.getLog(RpcRetryingCallerFactory.class); + private static final Logger LOG = LoggerFactory.getLogger(RpcRetryingCallerFactory.class); protected final Configuration conf; private final long pause; private final long pauseForCQTBE;// pause for CallQueueTooBigException, if specified @@ -47,7 +47,7 @@ public class RpcRetryingCallerFactory { public RpcRetryingCallerFactory(Configuration conf) { this(conf, RetryingCallerInterceptorFactory.NO_OP_INTERCEPTOR); } - + public RpcRetryingCallerFactory(Configuration conf, RetryingCallerInterceptor interceptor) { this.conf = conf; pause = conf.getLong(HConstants.HBASE_CLIENT_PAUSE, diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerImpl.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerImpl.java index 524281804c8..7d0e9a05de8 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerImpl.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerImpl.java @@ -28,8 +28,7 @@ import java.net.SocketTimeoutException; import java.util.ArrayList; import java.util.List; import java.util.concurrent.atomic.AtomicBoolean; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; + import org.apache.hadoop.hbase.CallQueueTooBigException; import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.exceptions.PreemptiveFastFailException; @@ -38,7 +37,8 @@ import org.apache.hadoop.hbase.util.ExceptionUtil; import org.apache.hadoop.ipc.RemoteException; import org.apache.hadoop.util.StringUtils; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; /** @@ -53,7 +53,7 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; @InterfaceAudience.Private public class RpcRetryingCallerImpl implements RpcRetryingCaller { // LOG is being used in TestMultiRowRangeFilter, hence leaving it public - public static final Log LOG = LogFactory.getLog(RpcRetryingCallerImpl.class); + public static final Logger LOG = LoggerFactory.getLogger(RpcRetryingCallerImpl.class); /** How many retries are allowed before we start to log */ private final int startLogErrorsCnt; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerWithReadReplicas.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerWithReadReplicas.java index c6ba228a785..4a31cff4a71 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerWithReadReplicas.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerWithReadReplicas.java @@ -28,8 +28,6 @@ import java.util.concurrent.ExecutorService; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import 
org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.HBaseIOException; @@ -37,6 +35,8 @@ import org.apache.hadoop.hbase.HRegionLocation; import org.apache.hadoop.hbase.RegionLocations; import org.apache.hadoop.hbase.TableName; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.ipc.HBaseRpcController; import org.apache.hadoop.hbase.ipc.RpcControllerFactory; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; @@ -54,7 +54,9 @@ import static org.apache.hadoop.hbase.HConstants.PRIORITY_UNSET; */ @InterfaceAudience.Private public class RpcRetryingCallerWithReadReplicas { - private static final Log LOG = LogFactory.getLog(RpcRetryingCallerWithReadReplicas.class); + private static final Logger LOG = + LoggerFactory.getLogger(RpcRetryingCallerWithReadReplicas.class); + protected final ExecutorService pool; protected final ClusterConnection cConnection; protected final Configuration conf; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java index 266785854ed..7139b26da98 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java @@ -29,10 +29,10 @@ import java.util.NavigableSet; import java.util.TreeMap; import java.util.TreeSet; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HConstants; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.client.metrics.ScanMetrics; import org.apache.hadoop.hbase.filter.Filter; import org.apache.hadoop.hbase.filter.IncompatibleFilterException; @@ -87,7 +87,7 @@ import org.apache.hadoop.hbase.util.Bytes; */ @InterfaceAudience.Public public class Scan extends Query { - private static final Log LOG = LogFactory.getLog(Scan.class); + private static final Logger LOG = LoggerFactory.getLogger(Scan.class); private static final String RAW_ATTR = "_raw_"; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallable.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallable.java index 87a05d68b98..45b74ef9384 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallable.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallable.java @@ -27,8 +27,6 @@ import static org.apache.hadoop.hbase.client.ConnectionUtils.updateServerSideMet import java.io.IOException; import java.io.InterruptedIOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.HBaseIOException; @@ -40,6 +38,8 @@ import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.UnknownScannerException; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.client.metrics.ScanMetrics; import org.apache.hadoop.hbase.exceptions.ScannerResetException; import org.apache.hadoop.hbase.ipc.RpcControllerFactory; @@ -62,7 +62,7 @@ public class ScannerCallable extends ClientServiceCallable { public static final String LOG_SCANNER_ACTIVITY = 
"hbase.client.log.scanner.activity"; // Keeping LOG public as it is being used in TestScannerHeartbeatMessages - public static final Log LOG = LogFactory.getLog(ScannerCallable.class); + public static final Logger LOG = LoggerFactory.getLogger(ScannerCallable.class); protected long scannerId = -1L; protected boolean instantiated = false; protected boolean closed = false; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallableWithReplicas.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallableWithReplicas.java index 9dd1052c2bc..3cf377b8952 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallableWithReplicas.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallableWithReplicas.java @@ -32,14 +32,14 @@ import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.RegionLocations; import org.apache.hadoop.hbase.TableName; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.client.ScannerCallable.MoreResults; import org.apache.hadoop.hbase.util.Pair; @@ -57,7 +57,7 @@ import org.apache.hadoop.hbase.util.Pair; */ @InterfaceAudience.Private class ScannerCallableWithReplicas implements RetryingCallable { - private static final Log LOG = LogFactory.getLog(ScannerCallableWithReplicas.class); + private static final Logger LOG = LoggerFactory.getLogger(ScannerCallableWithReplicas.class); volatile ScannerCallable currentScannerCallable; AtomicBoolean replicaSwitched = new AtomicBoolean(false); final ClusterConnection cConnection; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/SimpleRequestController.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/SimpleRequestController.java index ddcfe0b169f..023188043c5 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/SimpleRequestController.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/SimpleRequestController.java @@ -34,8 +34,7 @@ import java.util.concurrent.ConcurrentSkipListMap; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; import java.util.function.Consumer; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; + import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HRegionInfo; @@ -43,6 +42,8 @@ import org.apache.hadoop.hbase.HRegionLocation; import org.apache.hadoop.hbase.ServerName; import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceStability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.util.Bytes; import static org.apache.hadoop.hbase.util.CollectionUtils.computeIfAbsent; import org.apache.hadoop.hbase.util.EnvironmentEdge; @@ -54,7 +55,7 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; @InterfaceAudience.Private @InterfaceStability.Evolving class SimpleRequestController implements RequestController { - private static final Log LOG = LogFactory.getLog(SimpleRequestController.class); + private static final 
Logger LOG = LoggerFactory.getLogger(SimpleRequestController.class); /** * The maximum heap size for each request. */ diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/SyncCoprocessorRpcChannel.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/SyncCoprocessorRpcChannel.java index 36d2b31173e..6b4419d5385 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/SyncCoprocessorRpcChannel.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/SyncCoprocessorRpcChannel.java @@ -25,9 +25,9 @@ import com.google.protobuf.ServiceException; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel; import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils; @@ -39,7 +39,7 @@ import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils; */ @InterfaceAudience.Public abstract class SyncCoprocessorRpcChannel implements CoprocessorRpcChannel { - private static final Log LOG = LogFactory.getLog(SyncCoprocessorRpcChannel.class); + private static final Logger LOG = LoggerFactory.getLogger(SyncCoprocessorRpcChannel.class); @Override @InterfaceAudience.Private diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableDescriptorBuilder.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableDescriptorBuilder.java index 5c4f7210c9d..9f40ae6c13b 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableDescriptorBuilder.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableDescriptorBuilder.java @@ -34,8 +34,6 @@ import java.util.function.Function; import java.util.regex.Matcher; import java.util.stream.Stream; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Coprocessor; import org.apache.hadoop.hbase.HConstants; @@ -46,13 +44,15 @@ import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos; import org.apache.hadoop.hbase.util.Bytes; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * @since 2.0.0 */ @InterfaceAudience.Public public class TableDescriptorBuilder { - public static final Log LOG = LogFactory.getLog(TableDescriptorBuilder.class); + public static final Logger LOG = LoggerFactory.getLogger(TableDescriptorBuilder.class); @InterfaceAudience.Private public static final String SPLIT_POLICY = "SPLIT_POLICY"; private static final Bytes SPLIT_POLICY_KEY = new Bytes(Bytes.toBytes(SPLIT_POLICY)); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ZKAsyncRegistry.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ZKAsyncRegistry.java index bd8325e9d99..200d24dc2da 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ZKAsyncRegistry.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ZKAsyncRegistry.java @@ -28,8 +28,6 @@ import java.io.IOException; import java.util.concurrent.CompletableFuture; import org.apache.commons.lang3.mutable.MutableInt; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.ClusterId; import org.apache.hadoop.hbase.HRegionLocation; 
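Nearly every hunk in this patch repeats the same per-class edit, so a condensed sketch may help when scanning the remaining files; the class name below is hypothetical and not taken from the patch.

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class SomeClientClass {
  // before: private static final Log LOG = LogFactory.getLog(SomeClientClass.class);
  private static final Logger LOG = LoggerFactory.getLogger(SomeClientClass.class);
}

The matching build-level change is the swap of the commons-logging coordinates for org.slf4j:slf4j-api in the affected module poms (see the hbase-common/pom.xml hunk later in this patch).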
@@ -41,7 +39,8 @@ import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.hbase.zookeeper.ReadOnlyZKClient; import org.apache.hadoop.hbase.zookeeper.ZNodePaths; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos; import org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos; @@ -52,7 +51,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos; @InterfaceAudience.Private class ZKAsyncRegistry implements AsyncRegistry { - private static final Log LOG = LogFactory.getLog(ZKAsyncRegistry.class); + private static final Logger LOG = LoggerFactory.getLogger(ZKAsyncRegistry.class); private final ReadOnlyZKClient zk; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/backoff/ClientBackoffPolicyFactory.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/backoff/ClientBackoffPolicyFactory.java index 14c9c8abe9a..03589472374 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/backoff/ClientBackoffPolicyFactory.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/backoff/ClientBackoffPolicyFactory.java @@ -17,19 +17,19 @@ */ package org.apache.hadoop.hbase.client.backoff; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.ServerName; import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceStability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.util.ReflectionUtils; @InterfaceAudience.Private @InterfaceStability.Evolving public final class ClientBackoffPolicyFactory { - private static final Log LOG = LogFactory.getLog(ClientBackoffPolicyFactory.class); + private static final Logger LOG = LoggerFactory.getLogger(ClientBackoffPolicyFactory.class); private ClientBackoffPolicyFactory() { } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/backoff/ExponentialClientBackoffPolicy.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/backoff/ExponentialClientBackoffPolicy.java index 53d67762e94..6bd3fb5b15a 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/backoff/ExponentialClientBackoffPolicy.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/backoff/ExponentialClientBackoffPolicy.java @@ -17,13 +17,12 @@ */ package org.apache.hadoop.hbase.client.backoff; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.ServerName; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; /** @@ -33,7 +32,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; @InterfaceAudience.Public public class ExponentialClientBackoffPolicy implements ClientBackoffPolicy { - private static final Log LOG = LogFactory.getLog(ExponentialClientBackoffPolicy.class); + private static final Logger LOG = LoggerFactory.getLogger(ExponentialClientBackoffPolicy.class); private static final long ONE_MINUTE = 60 * 1000; public static final long 
DEFAULT_MAX_BACKOFF = 5 * ONE_MINUTE; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.java index 5a5913c06a6..a5081cb7be5 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.java @@ -30,13 +30,13 @@ import java.util.Set; import java.util.TreeMap; import java.util.regex.Pattern; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.ReplicationPeerNotFoundException; import org.apache.hadoop.hbase.TableName; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; @@ -72,7 +72,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; @InterfaceAudience.Public @Deprecated public class ReplicationAdmin implements Closeable { - private static final Log LOG = LogFactory.getLog(ReplicationAdmin.class); + private static final Logger LOG = LoggerFactory.getLogger(ReplicationAdmin.class); public static final String TNAME = "tableName"; public static final String CFNAME = "columnFamilyName"; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationPeerConfigUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationPeerConfigUtil.java index d8c86f09d4e..ec80eca0f57 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationPeerConfigUtil.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationPeerConfigUtil.java @@ -18,37 +18,38 @@ */ package org.apache.hadoop.hbase.client.replication; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; + import org.apache.commons.lang3.StringUtils; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString; -import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.CompoundConfiguration; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.exceptions.DeserializationException; -import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; -import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos; -import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos; import org.apache.hadoop.hbase.replication.ReplicationException; import org.apache.hadoop.hbase.replication.ReplicationPeerConfig; import org.apache.hadoop.hbase.replication.ReplicationPeerDescription; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Strings; -import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import org.apache.yetus.audience.InterfaceAudience; import 
org.apache.yetus.audience.InterfaceStability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; -import java.io.IOException; -import java.util.Collection; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.HashMap; -import java.util.ArrayList; -import java.util.Set; -import java.util.stream.Collectors; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations; +import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; +import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos; +import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos; /** * Helper for TableCFs Operations. @@ -57,7 +58,7 @@ import java.util.stream.Collectors; @InterfaceStability.Stable public final class ReplicationPeerConfigUtil { - private static final Log LOG = LogFactory.getLog(ReplicationPeerConfigUtil.class); + private static final Logger LOG = LoggerFactory.getLogger(ReplicationPeerConfigUtil.class); private ReplicationPeerConfigUtil() {} diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/RegionMovedException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/RegionMovedException.java index 07925804f2c..e79c138e703 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/RegionMovedException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/RegionMovedException.java @@ -17,13 +17,13 @@ */ package org.apache.hadoop.hbase.exceptions; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.NotServingRegionException; import org.apache.hadoop.hbase.ServerName; import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceStability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Subclass if the server knows the region is now on another server. @@ -32,7 +32,7 @@ import org.apache.yetus.audience.InterfaceStability; @InterfaceAudience.Private @InterfaceStability.Evolving public class RegionMovedException extends NotServingRegionException { - private static final Log LOG = LogFactory.getLog(RegionMovedException.class); + private static final Logger LOG = LoggerFactory.getLogger(RegionMovedException.class); private static final long serialVersionUID = -7232903522310558396L; private final String hostname; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/RegionOpeningException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/RegionOpeningException.java index 5c708e15866..a854b996ae9 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/RegionOpeningException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/RegionOpeningException.java @@ -18,11 +18,11 @@ */ package org.apache.hadoop.hbase.exceptions; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.NotServingRegionException; import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceStability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Subclass if the server knows the region is now on another server. 
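Beyond the declaration swap, the SLF4J API shape forces two small kinds of call-site adjustment that appear later in this patch: its logging methods take a String message (hence the Objects.toString(...) wrapping added in the test classes further down), and it supports {} placeholders in message strings. A minimal sketch, with illustrative names only:

import java.util.Objects;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

class CallSiteSketch {
  private static final Logger LOG = LoggerFactory.getLogger(CallSiteSketch.class);

  void log(Object result, String region, String server) {
    // commons-logging accepted any Object; SLF4J wants a String message.
    LOG.info(Objects.toString(result));
    // {} placeholders defer message formatting until the level is actually enabled.
    LOG.debug("Located region {} on {}", region, server);
  }
}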
@@ -31,7 +31,7 @@ import org.apache.yetus.audience.InterfaceStability; @InterfaceAudience.Private @InterfaceStability.Evolving public class RegionOpeningException extends NotServingRegionException { - private static final Log LOG = LogFactory.getLog(RegionOpeningException.class); + private static final Logger LOG = LoggerFactory.getLogger(RegionOpeningException.class); private static final long serialVersionUID = -7232903522310558395L; public RegionOpeningException(String message) { diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ParseFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ParseFilter.java index 6ebe2fe9116..e984212985e 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ParseFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ParseFilter.java @@ -31,10 +31,10 @@ import java.util.Map; import java.util.Set; import java.util.Stack; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.CompareOperator; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp; import org.apache.hadoop.hbase.util.Bytes; @@ -49,7 +49,7 @@ import org.apache.hadoop.hbase.util.Bytes; */ @InterfaceAudience.Public public class ParseFilter { - private static final Log LOG = LogFactory.getLog(ParseFilter.class); + private static final Logger LOG = LoggerFactory.getLogger(ParseFilter.class); private static HashMap operatorPrecedenceHashMap; private static HashMap filterHashMap; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RegexStringComparator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RegexStringComparator.java index 89cebbee02c..d1caaf0216e 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RegexStringComparator.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RegexStringComparator.java @@ -23,8 +23,6 @@ import java.nio.charset.IllegalCharsetNameException; import java.util.Arrays; import java.util.regex.Pattern; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos; @@ -36,7 +34,8 @@ import org.joni.Matcher; import org.joni.Option; import org.joni.Regex; import org.joni.Syntax; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException; /** @@ -72,7 +71,7 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferE @InterfaceAudience.Public public class RegexStringComparator extends ByteArrayComparable { - private static final Log LOG = LogFactory.getLog(RegexStringComparator.class); + private static final Logger LOG = LoggerFactory.getLogger(RegexStringComparator.class); private Engine engine; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java index 0e7f376ff6b..2e902af0d7c 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java @@ -49,12 +49,12 @@ import 
java.util.concurrent.ScheduledFuture; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.ServerName; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.client.MetricsConnection; import org.apache.hadoop.hbase.codec.Codec; import org.apache.hadoop.hbase.codec.KeyValueCodec; @@ -94,7 +94,7 @@ import org.apache.hadoop.security.token.TokenSelector; @InterfaceAudience.Private public abstract class AbstractRpcClient implements RpcClient { // Log level is being changed in tests - public static final Log LOG = LogFactory.getLog(AbstractRpcClient.class); + public static final Logger LOG = LoggerFactory.getLogger(AbstractRpcClient.class); protected static final HashedWheelTimer WHEEL_TIMER = new HashedWheelTimer( Threads.newDaemonThreadFactory("RpcClient-timer"), 10, TimeUnit.MILLISECONDS); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcConnection.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcConnection.java index bd761802c68..113babb4b83 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcConnection.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcConnection.java @@ -44,14 +44,14 @@ import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.ThreadLocalRandom; import javax.security.sasl.SaslException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; + import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.CellScanner; import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.exceptions.ConnectionClosingException; import org.apache.hadoop.hbase.io.ByteArrayOutputStream; import org.apache.hadoop.hbase.ipc.HBaseRpcController.CancellationCallback; +import org.apache.hadoop.hbase.log.HBaseMarkers; import org.apache.hadoop.hbase.security.HBaseSaslRpcClient; import org.apache.hadoop.hbase.security.SaslUtil; import org.apache.hadoop.hbase.security.SaslUtil.QualityOfProtection; @@ -65,7 +65,8 @@ import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.util.StringUtils; import org.apache.htrace.core.TraceScope; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message; import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message.Builder; import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback; @@ -85,7 +86,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ResponseHeade @InterfaceAudience.Private class BlockingRpcConnection extends RpcConnection implements Runnable { - private static final Log LOG = LogFactory.getLog(BlockingRpcConnection.class); + private static final Logger LOG = LoggerFactory.getLogger(BlockingRpcConnection.class); private final BlockingRpcClient rpcClient; @@ -419,7 +420,7 @@ class BlockingRpcConnection extends RpcConnection implements Runnable { if (ex instanceof SaslException) { String msg = "SASL authentication failed." + " The most likely cause is missing or invalid credentials." 
+ " Consider 'kinit'."; - LOG.fatal(msg, ex); + LOG.error(HBaseMarkers.FATAL, msg, ex); throw new RuntimeException(msg, ex); } throw new IOException(ex); @@ -568,8 +569,9 @@ class BlockingRpcConnection extends RpcConnection implements Runnable { } waitingConnectionHeaderResponse = false; } catch (SocketTimeoutException ste) { - LOG.fatal("Can't get the connection header response for rpc timeout, please check if" + - " server has the correct configuration to support the additional function.", ste); + LOG.error(HBaseMarkers.FATAL, "Can't get the connection header response for rpc timeout, " + + "please check if server has the correct configuration to support the additional " + + "function.", ste); // timeout when waiting the connection header response, ignore the additional function throw new IOException("Timeout while waiting connection header response", ste); } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CellBlockBuilder.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CellBlockBuilder.java index 69c9e39d3ce..5c6ddbfc359 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CellBlockBuilder.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CellBlockBuilder.java @@ -29,13 +29,13 @@ import java.nio.BufferOverflowException; import java.nio.ByteBuffer; import org.apache.commons.io.IOUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.CellScanner; import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.codec.Codec; import org.apache.hadoop.hbase.io.ByteBuffInputStream; import org.apache.hadoop.hbase.io.ByteBufferInputStream; @@ -58,7 +58,7 @@ import org.apache.hadoop.io.compress.Decompressor; class CellBlockBuilder { // LOG is being used in TestCellBlockBuilder - static final Log LOG = LogFactory.getLog(CellBlockBuilder.class); + static final Logger LOG = LoggerFactory.getLogger(CellBlockBuilder.class); private final Configuration conf; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CoprocessorRpcUtils.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CoprocessorRpcUtils.java index 4f0e5e65d05..7eb29f94e3d 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CoprocessorRpcUtils.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CoprocessorRpcUtils.java @@ -23,11 +23,11 @@ import static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Regi import java.io.IOException; import java.io.InterruptedIOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.HConstants; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.exceptions.UnknownProtocolException; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos; import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations; @@ -52,7 +52,7 @@ import edu.umd.cs.findbugs.annotations.Nullable; */ @InterfaceAudience.Private public final class CoprocessorRpcUtils { - private static final Log LOG = LogFactory.getLog(CoprocessorRpcUtils.class); + private static final Logger LOG = 
LoggerFactory.getLogger(CoprocessorRpcUtils.class); /** * We assume that all HBase protobuf services share a common package name * (defined in the .proto files). diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/FailedServers.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/FailedServers.java index 3cb8f016aa7..9b573adb4a2 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/FailedServers.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/FailedServers.java @@ -23,10 +23,10 @@ import java.util.Iterator; import java.util.LinkedList; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.Pair; @@ -38,7 +38,7 @@ public class FailedServers { private final Map failedServers = new HashMap(); private long latestExpiry = 0; private final int recheckServersTimeout; - private static final Log LOG = LogFactory.getLog(FailedServers.class); + private static final Logger LOG = LoggerFactory.getLogger(FailedServers.class); public FailedServers(Configuration conf) { this.recheckServersTimeout = conf.getInt( diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcConnection.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcConnection.java index 5a012a1e438..581483a22d5 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcConnection.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcConnection.java @@ -49,9 +49,9 @@ import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.ThreadLocalRandom; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.ipc.BufferCallBeforeInitHandler.BufferCallEvent; import org.apache.hadoop.hbase.ipc.HBaseRpcController.CancellationCallback; import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader; @@ -70,7 +70,7 @@ import org.apache.hadoop.security.UserGroupInformation; @InterfaceAudience.Private class NettyRpcConnection extends RpcConnection { - private static final Log LOG = LogFactory.getLog(NettyRpcConnection.class); + private static final Logger LOG = LoggerFactory.getLogger(NettyRpcConnection.class); private static final ScheduledExecutorService RELOGIN_EXECUTOR = Executors.newSingleThreadScheduledExecutor(Threads.newDaemonThreadFactory("Relogin")); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcDuplexHandler.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcDuplexHandler.java index 062255bf475..13be390e156 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcDuplexHandler.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcDuplexHandler.java @@ -34,10 +34,10 @@ import java.io.IOException; import java.util.HashMap; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.CellScanner; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; 
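SLF4J has no fatal level, which is why the BlockingRpcConnection hunk above rewrites LOG.fatal(msg, ex) as LOG.error(HBaseMarkers.FATAL, msg, ex). A minimal sketch of that pattern, assuming HBaseMarkers.FATAL is an org.slf4j.Marker obtained roughly as below:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.Marker;
import org.slf4j.MarkerFactory;

class FatalLoggingSketch {
  private static final Logger LOG = LoggerFactory.getLogger(FatalLoggingSketch.class);
  // Assumption: HBaseMarkers presumably creates its FATAL marker once, centrally, like this.
  private static final Marker FATAL = MarkerFactory.getMarker("FATAL");

  void reportSaslFailure(Exception ex) {
    // before: LOG.fatal("SASL authentication failed.", ex);
    LOG.error(FATAL, "SASL authentication failed.", ex);
  }
}

Log backends that understand markers can then keep routing these messages as fatal events even though the SLF4J call itself is error-level.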
import org.apache.hadoop.hbase.codec.Codec; import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta; import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse; @@ -53,7 +53,7 @@ import org.apache.hadoop.ipc.RemoteException; @InterfaceAudience.Private class NettyRpcDuplexHandler extends ChannelDuplexHandler { - private static final Log LOG = LogFactory.getLog(NettyRpcDuplexHandler.class); + private static final Logger LOG = LoggerFactory.getLogger(NettyRpcDuplexHandler.class); private final NettyRpcConnection conn; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcConnection.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcConnection.java index bdedcf8bcb7..582067f7445 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcConnection.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcConnection.java @@ -25,11 +25,11 @@ import java.io.IOException; import java.net.UnknownHostException; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HConstants; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.codec.Codec; import org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; @@ -52,7 +52,7 @@ import org.apache.hadoop.security.token.TokenSelector; @InterfaceAudience.Private abstract class RpcConnection { - private static final Log LOG = LogFactory.getLog(RpcConnection.class); + private static final Logger LOG = LoggerFactory.getLogger(RpcConnection.class); protected final ConnectionId remoteId; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcControllerFactory.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcControllerFactory.java index 16fe27df52a..e944ec2cd24 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcControllerFactory.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcControllerFactory.java @@ -19,12 +19,12 @@ package org.apache.hadoop.hbase.ipc; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.CellScannable; import org.apache.hadoop.hbase.CellScanner; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.util.ReflectionUtils; /** @@ -32,7 +32,7 @@ import org.apache.hadoop.hbase.util.ReflectionUtils; */ @InterfaceAudience.Private public class RpcControllerFactory { - private static final Log LOG = LogFactory.getLog(RpcControllerFactory.class); + private static final Logger LOG = LoggerFactory.getLogger(RpcControllerFactory.class); /** * Custom RPC Controller factory allows frameworks to change the RPC controller. 
If the configured diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaRetriever.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaRetriever.java index 5d1634a392b..1e79186fb87 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaRetriever.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaRetriever.java @@ -25,11 +25,11 @@ import java.util.LinkedList; import java.util.Objects; import java.util.Queue; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.TableName; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; import org.apache.hadoop.hbase.client.Result; @@ -44,7 +44,7 @@ import org.apache.hadoop.util.StringUtils; */ @InterfaceAudience.Public public class QuotaRetriever implements Closeable, Iterable { - private static final Log LOG = LogFactory.getLog(QuotaRetriever.class); + private static final Logger LOG = LoggerFactory.getLogger(QuotaRetriever.class); private final Queue cache = new LinkedList<>(); private ResultScanner scanner; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaTableUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaTableUtil.java index e5573e4d280..9e5f092ed64 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaTableUtil.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaTableUtil.java @@ -28,8 +28,6 @@ import java.util.Map; import java.util.Objects; import java.util.regex.Pattern; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellScanner; import org.apache.hadoop.hbase.CompareOperator; @@ -38,6 +36,8 @@ import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.TableName; import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceStability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.client.ClusterConnection; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.Get; @@ -89,7 +89,7 @@ import org.apache.hadoop.hbase.util.Strings; @InterfaceAudience.Private @InterfaceStability.Evolving public class QuotaTableUtil { - private static final Log LOG = LogFactory.getLog(QuotaTableUtil.class); + private static final Logger LOG = LoggerFactory.getLogger(QuotaTableUtil.class); /** System table for quotas */ public static final TableName QUOTA_TABLE_NAME = diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/AbstractHBaseSaslRpcClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/AbstractHBaseSaslRpcClient.java index 290778ad6c2..b65b30817f0 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/AbstractHBaseSaslRpcClient.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/AbstractHBaseSaslRpcClient.java @@ -33,9 +33,9 @@ import javax.security.sasl.Sasl; import javax.security.sasl.SaslClient; import javax.security.sasl.SaslException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import 
org.slf4j.LoggerFactory; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.TokenIdentifier; @@ -47,7 +47,7 @@ import org.apache.hadoop.security.token.TokenIdentifier; @InterfaceAudience.Private public abstract class AbstractHBaseSaslRpcClient { - private static final Log LOG = LogFactory.getLog(AbstractHBaseSaslRpcClient.class); + private static final Logger LOG = LoggerFactory.getLogger(AbstractHBaseSaslRpcClient.class); private static final byte[] EMPTY_TOKEN = new byte[0]; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/EncryptionUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/EncryptionUtil.java index 8a40fc12e2c..7bba8b8466f 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/EncryptionUtil.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/EncryptionUtil.java @@ -28,13 +28,13 @@ import java.util.Properties; import javax.crypto.spec.SecretKeySpec; import org.apache.commons.crypto.cipher.CryptoCipherFactory; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HConstants; import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceStability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor; import org.apache.hadoop.hbase.io.crypto.Cipher; import org.apache.hadoop.hbase.io.crypto.Encryption; @@ -50,7 +50,7 @@ import org.apache.hadoop.hbase.util.Bytes; @InterfaceAudience.Private @InterfaceStability.Evolving public final class EncryptionUtil { - static private final Log LOG = LogFactory.getLog(EncryptionUtil.class); + static private final Logger LOG = LoggerFactory.getLogger(EncryptionUtil.class); static private final SecureRandom RNG = new SecureRandom(); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.java index 5d502f18151..37d3cddfaaa 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.java @@ -32,10 +32,10 @@ import java.nio.ByteBuffer; import javax.security.sasl.Sasl; import javax.security.sasl.SaslException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.io.crypto.aes.CryptoAES; import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos; import org.apache.hadoop.io.WritableUtils; @@ -52,7 +52,7 @@ import org.apache.hadoop.security.token.TokenIdentifier; @InterfaceAudience.Private public class HBaseSaslRpcClient extends AbstractHBaseSaslRpcClient { - private static final Log LOG = LogFactory.getLog(HBaseSaslRpcClient.class); + private static final Logger LOG = LoggerFactory.getLogger(HBaseSaslRpcClient.class); private boolean cryptoAesEnable; private CryptoAES cryptoAES; private InputStream saslInputStream; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/NettyHBaseSaslRpcClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/NettyHBaseSaslRpcClient.java index 
8b3fc5b438e..55f487dc151 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/NettyHBaseSaslRpcClient.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/NettyHBaseSaslRpcClient.java @@ -24,9 +24,9 @@ import java.io.IOException; import javax.security.sasl.Sasl; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.TokenIdentifier; @@ -36,7 +36,7 @@ import org.apache.hadoop.security.token.TokenIdentifier; */ @InterfaceAudience.Private public class NettyHBaseSaslRpcClient extends AbstractHBaseSaslRpcClient { - private static final Log LOG = LogFactory.getLog(NettyHBaseSaslRpcClient.class); + private static final Logger LOG = LoggerFactory.getLogger(NettyHBaseSaslRpcClient.class); public NettyHBaseSaslRpcClient(AuthMethod method, Token token, String serverPrincipal, boolean fallbackAllowed, String rpcProtection) throws IOException { diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/NettyHBaseSaslRpcClientHandler.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/NettyHBaseSaslRpcClientHandler.java index af081957c5e..db74726300b 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/NettyHBaseSaslRpcClientHandler.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/NettyHBaseSaslRpcClientHandler.java @@ -25,10 +25,10 @@ import org.apache.hadoop.hbase.shaded.io.netty.util.concurrent.Promise; import java.io.IOException; import java.security.PrivilegedExceptionAction; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.ipc.FallbackDisallowedException; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.token.Token; @@ -41,7 +41,7 @@ import org.apache.hadoop.security.token.TokenIdentifier; @InterfaceAudience.Private public class NettyHBaseSaslRpcClientHandler extends SimpleChannelInboundHandler { - private static final Log LOG = LogFactory.getLog(NettyHBaseSaslRpcClientHandler.class); + private static final Logger LOG = LoggerFactory.getLogger(NettyHBaseSaslRpcClientHandler.class); private final Promise saslPromise; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslUtil.java index b30715a1e2d..d37abdf72a8 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslUtil.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslUtil.java @@ -28,13 +28,13 @@ import javax.security.sasl.SaslException; import javax.security.sasl.SaslServer; import org.apache.commons.codec.binary.Base64; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @InterfaceAudience.Private public class SaslUtil { - private static final Log LOG = LogFactory.getLog(SaslUtil.class); + private static final Logger LOG = LoggerFactory.getLogger(SaslUtil.class); public static final String SASL_DEFAULT_REALM = "default"; public static final int SWITCH_TO_SIMPLE_AUTH 
= -88; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/Permission.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/Permission.java index 7ff311ef560..9fa6458be5c 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/Permission.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/Permission.java @@ -24,9 +24,9 @@ import java.io.IOException; import java.util.Arrays; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.io.VersionedWritable; @@ -54,7 +54,7 @@ public class Permission extends VersionedWritable { public byte code() { return code; } } - private static final Log LOG = LogFactory.getLog(Permission.class); + private static final Logger LOG = LoggerFactory.getLogger(Permission.class); protected static final Map ACTION_BY_CODE = Maps.newHashMap(); protected Action[] actions; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/UserPermission.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/UserPermission.java index 94e9c0e8827..72bd69f4970 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/UserPermission.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/UserPermission.java @@ -22,10 +22,10 @@ import java.io.DataInput; import java.io.DataOutput; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.TableName; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.util.Bytes; /** @@ -34,7 +34,7 @@ import org.apache.hadoop.hbase.util.Bytes; */ @InterfaceAudience.Private public class UserPermission extends TablePermission { - private static final Log LOG = LogFactory.getLog(UserPermission.class); + private static final Logger LOG = LoggerFactory.getLogger(UserPermission.class); private byte[] user; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenSelector.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenSelector.java index 87826dff0a5..39959ef61db 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenSelector.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenSelector.java @@ -20,9 +20,9 @@ package org.apache.hadoop.hbase.security.token; import java.util.Collection; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.io.Text; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.TokenIdentifier; @@ -31,7 +31,7 @@ import org.apache.hadoop.security.token.TokenSelector; @InterfaceAudience.Private public class AuthenticationTokenSelector implements TokenSelector { - private static final Log LOG = LogFactory.getLog(AuthenticationTokenSelector.class); + private static final Logger LOG = LoggerFactory.getLogger(AuthenticationTokenSelector.class); public AuthenticationTokenSelector() { } diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ResponseConverter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ResponseConverter.java index 305ec4d6e40..daa906e8972 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ResponseConverter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ResponseConverter.java @@ -23,8 +23,6 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellScanner; import org.apache.hadoop.hbase.DoNotRetryIOException; @@ -36,7 +34,8 @@ import org.apache.hadoop.hbase.ipc.ServerRpcController; import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos; import org.apache.hadoop.util.StringUtils; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString; import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController; import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionResponse; @@ -68,7 +67,7 @@ import edu.umd.cs.findbugs.annotations.Nullable; */ @InterfaceAudience.Private public final class ResponseConverter { - private static final Log LOG = LogFactory.getLog(ResponseConverter.class); + private static final Logger LOG = LoggerFactory.getLogger(ResponseConverter.class); private ResponseConverter() { } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ReadOnlyZKClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ReadOnlyZKClient.java index 965a243937e..b5d511cb4b0 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ReadOnlyZKClient.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ReadOnlyZKClient.java @@ -30,15 +30,14 @@ import java.util.concurrent.Delayed; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.yetus.audience.InterfaceAudience; import org.apache.zookeeper.KeeperException; import org.apache.zookeeper.KeeperException.Code; import org.apache.zookeeper.ZooKeeper; import org.apache.zookeeper.data.Stat; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** @@ -47,7 +46,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe @InterfaceAudience.Private public final class ReadOnlyZKClient implements Closeable { - private static final Log LOG = LogFactory.getLog(ReadOnlyZKClient.class); + private static final Logger LOG = LoggerFactory.getLogger(ReadOnlyZKClient.class); public static final String RECOVERY_RETRY = "zookeeper.recovery.retry"; diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/TestHTableDescriptor.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/TestHTableDescriptor.java index c8a39574de4..bad78264aca 100644 --- a/hbase-client/src/test/java/org/apache/hadoop/hbase/TestHTableDescriptor.java +++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/TestHTableDescriptor.java @@ -26,8 +26,6 @@ import java.io.IOException; import java.util.Arrays; import java.util.regex.Pattern; -import 
org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.client.Durability; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.testclassification.MiscTests; @@ -38,6 +36,8 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Test setting values in the descriptor @@ -45,7 +45,7 @@ import org.junit.rules.TestName; @Category({MiscTests.class, SmallTests.class}) @Deprecated public class TestHTableDescriptor { - private static final Log LOG = LogFactory.getLog(TestHTableDescriptor.class); + private static final Logger LOG = LoggerFactory.getLogger(TestHTableDescriptor.class); @Rule public TestName name = new TestName(); diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/TestInterfaceAudienceAnnotations.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/TestInterfaceAudienceAnnotations.java index 7b5aa5cb756..37df4decf92 100644 --- a/hbase-client/src/test/java/org/apache/hadoop/hbase/TestInterfaceAudienceAnnotations.java +++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/TestInterfaceAudienceAnnotations.java @@ -25,10 +25,9 @@ import java.lang.reflect.Method; import java.lang.reflect.Modifier; import java.util.ArrayList; import java.util.List; +import java.util.Objects; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceStability; import org.apache.hadoop.hbase.testclassification.SmallTests; @@ -43,6 +42,8 @@ import org.junit.Assert; import org.junit.Ignore; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Test cases for ensuring our client visible classes have annotations for @@ -72,7 +73,7 @@ import org.junit.experimental.categories.Category; public class TestInterfaceAudienceAnnotations { private static final String HBASE_PROTOBUF = "org.apache.hadoop.hbase.protobuf.generated"; - private static final Log LOG = LogFactory.getLog(TestInterfaceAudienceAnnotations.class); + private static final Logger LOG = LoggerFactory.getLogger(TestInterfaceAudienceAnnotations.class); /** Selects classes with generated in their package name */ static class GeneratedClassFilter implements ClassFinder.ClassFilter { @@ -315,7 +316,7 @@ public class TestInterfaceAudienceAnnotations { if (!classes.isEmpty()) { LOG.info("These are the classes that DO NOT have @InterfaceAudience annotation:"); for (Class clazz : classes) { - LOG.info(clazz); + LOG.info(Objects.toString(clazz)); } } @@ -358,7 +359,7 @@ public class TestInterfaceAudienceAnnotations { LOG.info("These are the @InterfaceAudience.Public classes that have @InterfaceStability " + "annotation:"); for (Class clazz : classes) { - LOG.info(clazz); + LOG.info(Objects.toString(clazz)); } } @@ -403,7 +404,7 @@ public class TestInterfaceAudienceAnnotations { LOG.info("These are the @InterfaceAudience.LimitedPrivate classes that DO NOT " + "have @InterfaceStability annotation:"); for (Class clazz : classes) { - LOG.info(clazz); + LOG.info(Objects.toString(clazz)); } } Assert.assertEquals("All classes that are marked with @InterfaceAudience.LimitedPrivate " + diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcess.java 
b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcess.java index f487568edfb..dd2ac6ff6ea 100644 --- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcess.java +++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcess.java @@ -50,8 +50,6 @@ import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.CallQueueTooBigException; import org.apache.hadoop.hbase.CategoryBasedTimeout; @@ -80,12 +78,14 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestRule; import org.mockito.Mockito; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({ClientTests.class, MediumTests.class}) public class TestAsyncProcess { @Rule public final TestRule timeout = CategoryBasedTimeout.builder().withTimeout(this.getClass()). withLookingForStuckThread(true).build(); - private static final Log LOG = LogFactory.getLog(TestAsyncProcess.class); + private static final Logger LOG = LoggerFactory.getLogger(TestAsyncProcess.class); private static final TableName DUMMY_TABLE = TableName.valueOf("DUMMY_TABLE"); private static final byte[] DUMMY_BYTES_1 = "DUMMY_BYTES_1".getBytes(StandardCharsets.UTF_8); diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java index 9f3297626a8..5a311d1f7f3 100644 --- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java +++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java @@ -25,6 +25,7 @@ import java.net.SocketTimeoutException; import java.util.Comparator; import java.util.HashMap; import java.util.Map; +import java.util.Objects; import java.util.Random; import java.util.SortedMap; import java.util.concurrent.CompletableFuture; @@ -35,8 +36,6 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; import org.apache.commons.lang3.NotImplementedException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.hbase.CellComparatorImpl; @@ -65,7 +64,8 @@ import org.junit.Ignore; import org.junit.Test; import org.junit.experimental.categories.Category; import org.mockito.Mockito; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.base.Stopwatch; import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString; import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController; @@ -102,7 +102,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpeci */ @Category({ClientTests.class, SmallTests.class}) public class TestClientNoCluster extends Configured implements Tool { - private static final Log LOG = LogFactory.getLog(TestClientNoCluster.class); + private static final Logger LOG = LoggerFactory.getLogger(TestClientNoCluster.class); private Configuration conf; public static final ServerName META_SERVERNAME = ServerName.valueOf("meta.example.org", 16010, 12345); @@ -234,7 +234,7 @@ public class TestClientNoCluster extends Configured implements Tool 
{ try { Result result = null; while ((result = scanner.next()) != null) { - LOG.info(result); + LOG.info(Objects.toString(result)); } } finally { scanner.close(); @@ -256,7 +256,7 @@ public class TestClientNoCluster extends Configured implements Tool { try { Result result = null; while ((result = scanner.next()) != null) { - LOG.info(result); + LOG.info(Objects.toString(result)); } } finally { scanner.close(); diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestInterfaceAlign.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestInterfaceAlign.java index ced1eb88854..6318bc496b5 100644 --- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestInterfaceAlign.java +++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestInterfaceAlign.java @@ -28,18 +28,18 @@ import java.util.Arrays; import java.util.List; import java.util.stream.Collectors; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Abortable; import org.apache.hadoop.hbase.testclassification.ClientTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({ ClientTests.class, SmallTests.class }) public class TestInterfaceAlign { - private static final Log LOG = LogFactory.getLog(TestInterfaceAlign.class); + private static final Logger LOG = LoggerFactory.getLogger(TestInterfaceAlign.class); /** * Test methods name match up diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromAdmin.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromAdmin.java index 6c9aadd099a..95369c73b41 100644 --- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromAdmin.java +++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromAdmin.java @@ -22,8 +22,6 @@ import static org.junit.Assert.fail; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HConstants; @@ -39,7 +37,8 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; import org.mockito.Mockito; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController; /** @@ -48,7 +47,7 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController; @Category({SmallTests.class, ClientTests.class}) public class TestSnapshotFromAdmin { - private static final Log LOG = LogFactory.getLog(TestSnapshotFromAdmin.class); + private static final Logger LOG = LoggerFactory.getLogger(TestSnapshotFromAdmin.class); @Rule public TestName name = new TestName(); diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestTableDescriptorBuilder.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestTableDescriptorBuilder.java index 3c159b2321f..639d97401a4 100644 --- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestTableDescriptorBuilder.java +++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestTableDescriptorBuilder.java @@ -26,8 +26,6 @@ import static org.junit.Assert.fail; import java.io.IOException; import java.util.regex.Pattern; -import 
org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; @@ -37,13 +35,15 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Test setting values in the descriptor */ @Category({MiscTests.class, SmallTests.class}) public class TestTableDescriptorBuilder { - private static final Log LOG = LogFactory.getLog(TestTableDescriptorBuilder.class); + private static final Logger LOG = LoggerFactory.getLogger(TestTableDescriptorBuilder.class); @Rule public TestName name = new TestName(); diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestCellBlockBuilder.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestCellBlockBuilder.java index 6fdb864c9f1..1ae2c4a9a11 100644 --- a/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestCellBlockBuilder.java +++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestCellBlockBuilder.java @@ -24,15 +24,12 @@ import java.nio.ByteBuffer; import java.util.Arrays; import org.apache.commons.lang3.time.StopWatch; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.commons.logging.impl.Log4JLogger; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellScanner; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HBaseConfiguration; -import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.codec.Codec; import org.apache.hadoop.hbase.codec.KeyValueCodec; import org.apache.hadoop.hbase.io.SizedCellScanner; @@ -44,15 +41,16 @@ import org.apache.hadoop.hbase.util.ClassSize; import org.apache.hadoop.io.compress.CompressionCodec; import org.apache.hadoop.io.compress.DefaultCodec; import org.apache.hadoop.io.compress.GzipCodec; -import org.apache.log4j.Level; import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({ ClientTests.class, SmallTests.class }) public class TestCellBlockBuilder { - private static final Log LOG = LogFactory.getLog(TestCellBlockBuilder.class); + private static final Logger LOG = LoggerFactory.getLogger(TestCellBlockBuilder.class); private CellBlockBuilder builder; @@ -190,7 +188,6 @@ public class TestCellBlockBuilder { } } CellBlockBuilder builder = new CellBlockBuilder(HBaseConfiguration.create()); - ((Log4JLogger) CellBlockBuilder.LOG).getLogger().setLevel(Level.ALL); timerTests(builder, count, size, new KeyValueCodec(), null); timerTests(builder, count, size, new KeyValueCodec(), new DefaultCodec()); timerTests(builder, count, size, new KeyValueCodec(), new GzipCodec()); diff --git a/hbase-common/pom.xml b/hbase-common/pom.xml index 559a795a158..27bd734f829 100644 --- a/hbase-common/pom.xml +++ b/hbase-common/pom.xml @@ -201,8 +201,8 @@ hbase-shaded-miscellaneous - commons-logging - commons-logging + org.slf4j + slf4j-api commons-codec diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/AuthUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/AuthUtil.java index fcfdee84550..5880b8c33b6 100644 --- 
a/hbase-common/src/main/java/org/apache/hadoop/hbase/AuthUtil.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/AuthUtil.java @@ -21,14 +21,14 @@ package org.apache.hadoop.hbase; import java.io.IOException; import java.net.UnknownHostException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.security.UserProvider; import org.apache.hadoop.hbase.util.DNS; import org.apache.hadoop.hbase.util.Strings; import org.apache.hadoop.security.UserGroupInformation; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Utility methods for helping with security tasks. Downstream users @@ -68,7 +68,7 @@ import org.apache.yetus.audience.InterfaceAudience; */ @InterfaceAudience.Public public class AuthUtil { - private static final Log LOG = LogFactory.getLog(AuthUtil.class); + private static final Logger LOG = LoggerFactory.getLogger(AuthUtil.class); /** Prefix character to denote group names */ private static final String GROUP_PREFIX = "@"; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellComparatorImpl.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellComparatorImpl.java index 2dd1bdb79a5..771fdaa7d6c 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellComparatorImpl.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellComparatorImpl.java @@ -18,13 +18,13 @@ package org.apache.hadoop.hbase; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.KeyValue.Type; import org.apache.hadoop.hbase.util.ByteBufferUtils; import org.apache.hadoop.hbase.util.Bytes; import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceStability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.primitives.Longs; @@ -44,7 +44,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.primitives.Longs; @InterfaceAudience.Private @InterfaceStability.Evolving public class CellComparatorImpl implements CellComparator { - static final Log LOG = LogFactory.getLog(CellComparatorImpl.class); + static final Logger LOG = LoggerFactory.getLogger(CellComparatorImpl.class); /** * Comparator for plain key/values; i.e. non-catalog table key/values. Works on Key portion * of KeyValue only. 
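Every hunk in this change applies the same mechanical substitution from commons-logging to slf4j. A minimal sketch of the pattern follows; ExampleService and its demo method are hypothetical names invented for illustration, while the three call-site conversions mirror the ones visible in the surrounding hunks (Objects.toString for Object messages, the HBaseMarkers.FATAL marker in place of fatal(), and message-plus-Throwable in place of Throwable-only logging calls).

import java.util.Objects;

import org.apache.hadoop.hbase.log.HBaseMarkers;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class ExampleService {
  // was: private static final Log LOG = LogFactory.getLog(ExampleService.class);
  private static final Logger LOG = LoggerFactory.getLogger(ExampleService.class);

  void demo(Object result, Exception e) {
    // slf4j message parameters are Strings, so Object arguments are converted explicitly.
    LOG.info(Objects.toString(result));                // was: LOG.info(result);

    // slf4j has no fatal() level; the FATAL marker introduced by this patch keeps the severity visible.
    LOG.error(HBaseMarkers.FATAL, "unrecoverable: " + result);   // was: LOG.fatal(...);

    // commons-logging accepted a bare Throwable as the message; slf4j does not,
    // so pass a String message plus the Throwable instead.
    LOG.debug(e.toString(), e);                        // was: LOG.debug(e);
  }
}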
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ChoreService.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ChoreService.java index ff437db5fc5..5cf37aaab02 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/ChoreService.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ChoreService.java @@ -27,10 +27,10 @@ import java.util.concurrent.ScheduledThreadPoolExecutor; import java.util.concurrent.ThreadFactory; import java.util.concurrent.atomic.AtomicInteger; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.ScheduledChore.ChoreServicer; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; @@ -55,7 +55,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe */ @InterfaceAudience.Public public class ChoreService implements ChoreServicer { - private static final Log LOG = LogFactory.getLog(ChoreService.class); + private static final Logger LOG = LoggerFactory.getLogger(ChoreService.class); /** * The minimum number of threads in the core pool of the underlying ScheduledThreadPoolExecutor @@ -113,7 +113,7 @@ public class ChoreService implements ChoreServicer { /** * @param coreThreadPoolPrefix Prefix that will be applied to the Thread name of all threads * spawned by this service - * @param corePoolSize The initial size to set the core pool of the ScheduledThreadPoolExecutor + * @param corePoolSize The initial size to set the core pool of the ScheduledThreadPoolExecutor * to during initialization. The default size is 1, but specifying a larger size may be * beneficial if you know that 1 thread will not be enough. * @param jitter Should chore service add some jitter for all of the scheduled chores. When set @@ -331,7 +331,7 @@ public class ChoreService implements ChoreServicer { scheduledChores.clear(); choresMissingStartTime.clear(); } - + /** * @return true when the service is shutdown and thus cannot be used anymore */ diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/HBaseConfiguration.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/HBaseConfiguration.java index 6012fe85f81..d21ee9389f8 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/HBaseConfiguration.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/HBaseConfiguration.java @@ -22,19 +22,19 @@ import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.util.VersionInfo; import org.apache.hadoop.hbase.zookeeper.ZKConfig; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Adds HBase configuration files to a Configuration */ @InterfaceAudience.Public public class HBaseConfiguration extends Configuration { - private static final Log LOG = LogFactory.getLog(HBaseConfiguration.class); + private static final Logger LOG = LoggerFactory.getLogger(HBaseConfiguration.class); /** * Instantiating HBaseConfiguration() is deprecated. 
Please use diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java index 88e7d88c4d3..6a79c88062e 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java @@ -35,15 +35,16 @@ import java.util.List; import java.util.Map; import java.util.Optional; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.util.ByteBufferUtils; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.ClassSize; import org.apache.hadoop.io.RawComparator; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; + /** * An HBase Key/Value. This is the fundamental HBase Type. *

@@ -81,7 +82,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe public class KeyValue implements ExtendedCell { private static final ArrayList EMPTY_ARRAY_LIST = new ArrayList<>(); - private static final Log LOG = LogFactory.getLog(KeyValue.class); + private static final Logger LOG = LoggerFactory.getLogger(KeyValue.class); public static final int FIXED_OVERHEAD = ClassSize.OBJECT + // the KeyValue object itself ClassSize.REFERENCE + // pointer to "bytes" @@ -738,9 +739,9 @@ public class KeyValue implements ExtendedCell { } public KeyValue(Cell c) { - this(c.getRowArray(), c.getRowOffset(), (int)c.getRowLength(), - c.getFamilyArray(), c.getFamilyOffset(), (int)c.getFamilyLength(), - c.getQualifierArray(), c.getQualifierOffset(), (int) c.getQualifierLength(), + this(c.getRowArray(), c.getRowOffset(), c.getRowLength(), + c.getFamilyArray(), c.getFamilyOffset(), c.getFamilyLength(), + c.getQualifierArray(), c.getQualifierOffset(), c.getQualifierLength(), c.getTimestamp(), Type.codeToType(c.getTypeByte()), c.getValueArray(), c.getValueOffset(), c.getValueLength(), c.getTagsArray(), c.getTagsOffset(), c.getTagsLength()); this.seqId = c.getSequenceId(); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ScheduledChore.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ScheduledChore.java index 927b2b37362..16842da3072 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/ScheduledChore.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ScheduledChore.java @@ -21,10 +21,9 @@ package org.apache.hadoop.hbase; import java.util.concurrent.ScheduledThreadPoolExecutor; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** @@ -42,7 +41,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe */ @InterfaceAudience.Public public abstract class ScheduledChore implements Runnable { - private static final Log LOG = LogFactory.getLog(ScheduledChore.class); + private static final Logger LOG = LoggerFactory.getLogger(ScheduledChore.class); private final String name; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/BaseDecoder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/BaseDecoder.java index b8816ad0170..e1a96bdcf48 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/BaseDecoder.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/BaseDecoder.java @@ -23,18 +23,19 @@ import java.io.InputStream; import java.io.PushbackInputStream; import edu.umd.cs.findbugs.annotations.NonNull; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; + import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * TODO javadoc */ @InterfaceAudience.LimitedPrivate({HBaseInterfaceAudience.COPROC, HBaseInterfaceAudience.PHOENIX}) public abstract class BaseDecoder implements Codec.Decoder { - protected static final Log LOG = LogFactory.getLog(BaseDecoder.class); + protected static final Logger LOG = LoggerFactory.getLogger(BaseDecoder.class); protected final InputStream in; private Cell current = null; diff --git 
a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferListOutputStream.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferListOutputStream.java index 61cc170dcec..0b97abbb7d4 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferListOutputStream.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferListOutputStream.java @@ -23,10 +23,10 @@ import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.util.ByteBufferUtils; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * An OutputStream which writes data into ByteBuffers. It will try to get ByteBuffer, as and when @@ -37,7 +37,7 @@ import org.apache.yetus.audience.InterfaceAudience; */ @InterfaceAudience.Private public class ByteBufferListOutputStream extends ByteBufferOutputStream { - private static final Log LOG = LogFactory.getLog(ByteBufferListOutputStream.class); + private static final Logger LOG = LoggerFactory.getLogger(ByteBufferListOutputStream.class); private ByteBufferPool pool; // Keep track of the BBs where bytes written to. We will first try to get a BB from the pool. If @@ -115,7 +115,7 @@ public class ByteBufferListOutputStream extends ByteBufferOutputStream { try { close(); } catch (IOException e) { - LOG.debug(e); + LOG.debug(e.toString(), e); } // Return back all the BBs to pool if (this.bufsFromPool != null) { diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferPool.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferPool.java index 784c88fcf6f..e699ea959c5 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferPool.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferPool.java @@ -22,9 +22,9 @@ import java.util.Queue; import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.atomic.AtomicInteger; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; @@ -45,7 +45,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe */ @InterfaceAudience.Private public class ByteBufferPool { - private static final Log LOG = LogFactory.getLog(ByteBufferPool.class); + private static final Logger LOG = LoggerFactory.getLogger(ByteBufferPool.class); // TODO better config names? 
// hbase.ipc.server.reservoir.initial.max -> hbase.ipc.server.reservoir.max.buffer.count // hbase.ipc.server.reservoir.initial.buffer.size -> hbase.ipc.server.reservoir.buffer.size diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/Compression.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/Compression.java index ec4ce38d3a7..d258ba2927d 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/Compression.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/Compression.java @@ -23,8 +23,6 @@ import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.IOUtils; @@ -39,6 +37,8 @@ import org.apache.hadoop.io.compress.DoNotPool; import org.apache.hadoop.io.compress.GzipCodec; import org.apache.hadoop.util.ReflectionUtils; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Compression related stuff. @@ -46,7 +46,7 @@ import org.apache.yetus.audience.InterfaceAudience; */ @InterfaceAudience.Private public final class Compression { - private static final Log LOG = LogFactory.getLog(Compression.class); + private static final Logger LOG = LoggerFactory.getLogger(Compression.class); /** * Prevent the instantiation of class. diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/ReusableStreamGzipCodec.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/ReusableStreamGzipCodec.java index 3db9d7ec440..05ea39b72a1 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/ReusableStreamGzipCodec.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/ReusableStreamGzipCodec.java @@ -22,14 +22,14 @@ import java.io.OutputStream; import java.util.Arrays; import java.util.zip.GZIPOutputStream; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.util.JVM; import org.apache.hadoop.io.compress.CompressionOutputStream; import org.apache.hadoop.io.compress.CompressorStream; import org.apache.hadoop.io.compress.GzipCodec; import org.apache.hadoop.io.compress.zlib.ZlibFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Fixes an inefficiency in Hadoop's Gzip codec, allowing to reuse compression @@ -38,7 +38,7 @@ import org.apache.yetus.audience.InterfaceAudience; @InterfaceAudience.Private public class ReusableStreamGzipCodec extends GzipCodec { - private static final Log LOG = LogFactory.getLog(Compression.class); + private static final Logger LOG = LoggerFactory.getLogger(Compression.class); /** * A bridge that wraps around a DeflaterOutputStream to make it a @@ -70,7 +70,7 @@ public class ReusableStreamGzipCodec extends GzipCodec { try { gzipStream.close(); } catch (IOException e) { - LOG.error(e); + LOG.error(e.toString(), e); } } } @@ -98,7 +98,7 @@ public class ReusableStreamGzipCodec extends GzipCodec { */ @Override public void finish() throws IOException { - if (HAS_BROKEN_FINISH) { + if (HAS_BROKEN_FINISH) { if (!def.finished()) { def.finish(); while (!def.finished()) { diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryption.java 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryption.java index 49cc61f2318..af0089d02cd 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryption.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryption.java @@ -27,13 +27,12 @@ import java.security.spec.InvalidKeySpecException; import java.util.Arrays; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; + import javax.crypto.SecretKeyFactory; import javax.crypto.spec.PBEKeySpec; import javax.crypto.spec.SecretKeySpec; import org.apache.commons.io.IOUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HConstants; @@ -41,6 +40,8 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.util.ReflectionUtils; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A facade for encryption algorithms and related support. @@ -48,7 +49,7 @@ import org.apache.yetus.audience.InterfaceAudience; @InterfaceAudience.Public public final class Encryption { - private static final Log LOG = LogFactory.getLog(Encryption.class); + private static final Logger LOG = LoggerFactory.getLogger(Encryption.class); /** * Crypto context @@ -204,7 +205,7 @@ public final class Encryption { /** * Return a 128 bit key derived from the concatenation of the supplied * arguments using PBKDF2WithHmacSHA1 at 10,000 iterations. - * + * */ public static byte[] pbkdf128(String... args) { byte[] salt = new byte[128]; @@ -227,7 +228,7 @@ public final class Encryption { /** * Return a 128 bit key derived from the concatenation of the supplied * arguments using PBKDF2WithHmacSHA1 at 10,000 iterations. - * + * */ public static byte[] pbkdf128(byte[]... 
args) { byte[] salt = new byte[128]; @@ -420,7 +421,7 @@ public final class Encryption { */ public static Key getSecretKeyForSubject(String subject, Configuration conf) throws IOException { - KeyProvider provider = (KeyProvider)getKeyProvider(conf); + KeyProvider provider = getKeyProvider(conf); if (provider != null) try { Key[] keys = provider.getKeys(new String[] { subject }); if (keys != null && keys.length > 0) { diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/AES.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/AES.java index 6c73bb4970b..93a60241ba3 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/AES.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/AES.java @@ -23,10 +23,9 @@ import java.io.OutputStream; import java.security.GeneralSecurityException; import java.security.Key; import java.security.SecureRandom; + import javax.crypto.spec.SecretKeySpec; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.io.crypto.Cipher; import org.apache.hadoop.hbase.io.crypto.CipherProvider; import org.apache.hadoop.hbase.io.crypto.Context; @@ -34,6 +33,8 @@ import org.apache.hadoop.hbase.io.crypto.Decryptor; import org.apache.hadoop.hbase.io.crypto.Encryptor; import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceStability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; @@ -48,7 +49,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; @InterfaceStability.Evolving public class AES extends Cipher { - private static final Log LOG = LogFactory.getLog(AES.class); + private static final Logger LOG = LoggerFactory.getLogger(AES.class); public static final String CIPHER_MODE_KEY = "hbase.crypto.algorithm.aes.mode"; public static final String CIPHER_PROVIDER_KEY = "hbase.crypto.algorithm.aes.provider"; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/CommonsCryptoAES.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/CommonsCryptoAES.java index 412ea7947d4..57f3d3e5e4c 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/CommonsCryptoAES.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/CommonsCryptoAES.java @@ -24,11 +24,10 @@ import java.security.GeneralSecurityException; import java.security.Key; import java.security.SecureRandom; import java.util.Properties; + import javax.crypto.spec.SecretKeySpec; import org.apache.commons.crypto.cipher.CryptoCipherFactory; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.io.crypto.Cipher; import org.apache.hadoop.hbase.io.crypto.CipherProvider; @@ -37,6 +36,8 @@ import org.apache.hadoop.hbase.io.crypto.Decryptor; import org.apache.hadoop.hbase.io.crypto.Encryptor; import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceStability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; @@ -45,7 +46,7 @@ import 
org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; @InterfaceStability.Evolving public class CommonsCryptoAES extends Cipher { - private static final Log LOG = LogFactory.getLog(CommonsCryptoAES.class); + private static final Logger LOG = LoggerFactory.getLogger(CommonsCryptoAES.class); public static final String CIPHER_MODE_KEY = "hbase.crypto.commons.mode"; public static final String CIPHER_CLASSES_KEY = "hbase.crypto.commons.cipher.classes"; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/RowIndexEncoderV1.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/RowIndexEncoderV1.java index 92a3a4f089a..7dbbdba9800 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/RowIndexEncoderV1.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/RowIndexEncoderV1.java @@ -13,16 +13,16 @@ package org.apache.hadoop.hbase.io.encoding; import java.io.DataOutputStream; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.io.ByteArrayOutputStream; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @InterfaceAudience.Private public class RowIndexEncoderV1 { - private static final Log LOG = LogFactory.getLog(RowIndexEncoderV1.class); + private static final Logger LOG = LoggerFactory.getLogger(RowIndexEncoderV1.class); /** The Cell previously appended. */ private Cell lastCell = null; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/log/HBaseMarkers.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/log/HBaseMarkers.java new file mode 100644 index 00000000000..ada8ad0ee81 --- /dev/null +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/log/HBaseMarkers.java @@ -0,0 +1,31 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hbase.log; + +import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Marker; +import org.slf4j.MarkerFactory; + +@InterfaceAudience.Private +public class HBaseMarkers { + public static final Marker FATAL = MarkerFactory.getMarker("FATAL"); + + private HBaseMarkers() { + } +} diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/security/Superusers.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/security/Superusers.java index dceafbd087c..c52c764c1e0 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/security/Superusers.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/security/Superusers.java @@ -23,11 +23,11 @@ import java.io.IOException; import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.AuthUtil; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Keeps lists of superusers and super groups loaded from HBase configuration, @@ -35,7 +35,7 @@ import org.apache.yetus.audience.InterfaceAudience; */ @InterfaceAudience.Private public final class Superusers { - private static final Log LOG = LogFactory.getLog(Superusers.class); + private static final Logger LOG = LoggerFactory.getLogger(Superusers.class); /** Configuration key for superusers */ public static final String SUPERUSER_CONF_KEY = "hbase.superuser"; // Not getting a name diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/HBaseHTraceConfiguration.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/HBaseHTraceConfiguration.java index b31a4f6d73a..03d03d9fe4e 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/HBaseHTraceConfiguration.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/HBaseHTraceConfiguration.java @@ -18,15 +18,15 @@ package org.apache.hadoop.hbase.trace; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.htrace.core.HTraceConfiguration; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @InterfaceAudience.Private public class HBaseHTraceConfiguration extends HTraceConfiguration { - private static final Log LOG = LogFactory.getLog(HBaseHTraceConfiguration.class); + private static final Logger LOG = LoggerFactory.getLogger(HBaseHTraceConfiguration.class); public static final String KEY_PREFIX = "hbase.htrace."; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/SpanReceiverHost.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/SpanReceiverHost.java index 89339c536f1..14ef945d75a 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/SpanReceiverHost.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/SpanReceiverHost.java @@ -21,11 +21,11 @@ import java.io.IOException; import java.util.Collection; import java.util.HashSet; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.htrace.core.SpanReceiver; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This class provides functions for reading the names of SpanReceivers from @@ -35,7 +35,7 @@ import 
org.apache.yetus.audience.InterfaceAudience; @InterfaceAudience.Private public class SpanReceiverHost { public static final String SPAN_RECEIVERS_CONF_KEY = "hbase.trace.spanreceiver.classes"; - private static final Log LOG = LogFactory.getLog(SpanReceiverHost.class); + private static final Logger LOG = LoggerFactory.getLogger(SpanReceiverHost.class); private Collection receivers; private Configuration conf; private boolean closed = false; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/AbstractHBaseTool.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/AbstractHBaseTool.java index 21b174e71b3..9414e315320 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/AbstractHBaseTool.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/AbstractHBaseTool.java @@ -31,14 +31,14 @@ import org.apache.commons.cli.MissingOptionException; import org.apache.commons.cli.Option; import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Common base class used for HBase command-line tools. Simplifies workflow and @@ -55,7 +55,7 @@ public abstract class AbstractHBaseTool implements Tool, Configurable { private static final Option HELP_OPTION = new Option("h", "help", false, "Prints help for this tool."); - private static final Log LOG = LogFactory.getLog(AbstractHBaseTool.class); + private static final Logger LOG = LoggerFactory.getLogger(AbstractHBaseTool.class); protected final Options options = new Options(); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Base64.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Base64.java index 91df2d56acb..fe74bcf91c1 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Base64.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Base64.java @@ -39,9 +39,10 @@ import java.nio.charset.StandardCharsets; import java.util.zip.GZIPInputStream; import java.util.zip.GZIPOutputStream; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.hbase.log.HBaseMarkers; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Encodes and decodes to and from Base64 notation. @@ -158,7 +159,7 @@ public class Base64 { /* ******** P R I V A T E F I E L D S ******** */ - private static final Log LOG = LogFactory.getLog(Base64.class); + private static final Logger LOG = LoggerFactory.getLogger(Base64.class); /** Maximum line length (76) of Base64 output. 
*/ private final static int MAX_LINE_LENGTH = 76; @@ -1100,7 +1101,7 @@ public class Base64 { // Check the size of file if (file.length() > Integer.MAX_VALUE) { - LOG.fatal("File is too big for this convenience method (" + + LOG.error(HBaseMarkers.FATAL, "File is too big for this convenience method (" + file.length() + " bytes)."); return null; } // end if: file too big for int index diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferArray.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferArray.java index b2e5c9b751e..6782de6bd05 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferArray.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferArray.java @@ -28,14 +28,15 @@ import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; -import com.google.common.annotations.VisibleForTesting; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.nio.ByteBuff; import org.apache.hadoop.hbase.nio.MultiByteBuff; import org.apache.hadoop.hbase.nio.SingleByteBuff; import org.apache.hadoop.util.StringUtils; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.google.common.annotations.VisibleForTesting; /** * This class manages an array of ByteBuffers with a default size 4MB. These @@ -44,7 +45,7 @@ import org.apache.yetus.audience.InterfaceAudience; */ @InterfaceAudience.Private public class ByteBufferArray { - private static final Log LOG = LogFactory.getLog(ByteBufferArray.class); + private static final Logger LOG = LoggerFactory.getLogger(ByteBufferArray.class); public static final int DEFAULT_BUFFER_SIZE = 4 * 1024 * 1024; @VisibleForTesting diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java index c32649b0670..159a7905b96 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java @@ -36,9 +36,6 @@ import java.util.Comparator; import java.util.Iterator; import java.util.List; -import com.google.protobuf.ByteString; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.KeyValue; @@ -46,11 +43,16 @@ import org.apache.hadoop.io.RawComparator; import org.apache.hadoop.io.WritableComparator; import org.apache.hadoop.io.WritableUtils; import org.apache.yetus.audience.InterfaceAudience; -import sun.misc.Unsafe; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; +import com.google.protobuf.ByteString; + +import sun.misc.Unsafe; + /** * Utility class that handles byte arrays, conversions to/from other types, * comparisons, hash code generation, manufacturing keys for HashMaps or @@ -70,7 +72,7 @@ public class Bytes implements Comparable { //HConstants.EMPTY_BYTE_ARRAY should be updated if this changed private static final byte [] EMPTY_BYTE_ARRAY = new byte [0]; - private static final Log LOG = LogFactory.getLog(Bytes.class); + private static final Logger LOG = LoggerFactory.getLogger(Bytes.class); /** * 
Size of boolean in bytes @@ -1533,8 +1535,8 @@ public class Bytes implements Comparable { final int stride = 8; final int minLength = Math.min(length1, length2); int strideLimit = minLength & ~(stride - 1); - final long offset1Adj = (long) offset1 + UnsafeAccess.BYTE_ARRAY_BASE_OFFSET; - final long offset2Adj = (long) offset2 + UnsafeAccess.BYTE_ARRAY_BASE_OFFSET; + final long offset1Adj = offset1 + UnsafeAccess.BYTE_ARRAY_BASE_OFFSET; + final long offset2Adj = offset2 + UnsafeAccess.BYTE_ARRAY_BASE_OFFSET; int i; /* @@ -1542,8 +1544,8 @@ public class Bytes implements Comparable { * than 4 bytes even on 32-bit. On the other hand, it is substantially faster on 64-bit. */ for (i = 0; i < strideLimit; i += stride) { - long lw = theUnsafe.getLong(buffer1, offset1Adj + (long) i); - long rw = theUnsafe.getLong(buffer2, offset2Adj + (long) i); + long lw = theUnsafe.getLong(buffer1, offset1Adj + i); + long rw = theUnsafe.getLong(buffer2, offset2Adj + i); if (lw != rw) { if(!UnsafeAccess.littleEndian) { return ((lw + Long.MIN_VALUE) < (rw + Long.MIN_VALUE)) ? -1 : 1; @@ -1936,7 +1938,7 @@ public class Bytes implements Comparable { public static int hashCode(byte[] bytes, int offset, int length) { int hash = 1; for (int i = offset; i < offset + length; i++) - hash = (31 * hash) + (int) bytes[i]; + hash = (31 * hash) + bytes[i]; return hash; } @@ -2517,7 +2519,7 @@ public class Bytes implements Comparable { } return new String(ch); } - + /** * Convert a byte array into a hex string */ diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassSize.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassSize.java index efcf8d0bec2..9a7f0ef70e4 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassSize.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassSize.java @@ -25,10 +25,9 @@ import java.lang.reflect.Modifier; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentSkipListMap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; @@ -40,7 +39,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe */ @InterfaceAudience.Private public class ClassSize { - private static final Log LOG = LogFactory.getLog(ClassSize.class); + private static final Logger LOG = LoggerFactory.getLogger(ClassSize.class); /** Array overhead */ public static final int ARRAY; @@ -197,7 +196,7 @@ public class ClassSize { return (int) UnsafeAccess.theUnsafe.objectFieldOffset( HeaderSize.class.getDeclaredField("a")); } catch (NoSuchFieldException | SecurityException e) { - LOG.error(e); + LOG.error(e.toString(), e); } return super.headerSize(); } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CommonFSUtils.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CommonFSUtils.java index 0e0372c94ea..5a6625bd4d3 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CommonFSUtils.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CommonFSUtils.java @@ -29,8 +29,6 @@ import java.util.Locale; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.HadoopIllegalArgumentException; import 
org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataOutputStream; @@ -45,6 +43,8 @@ import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.ipc.RemoteException; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; @@ -54,7 +54,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; */ @InterfaceAudience.Private public abstract class CommonFSUtils { - private static final Log LOG = LogFactory.getLog(CommonFSUtils.class); + private static final Logger LOG = LoggerFactory.getLogger(CommonFSUtils.class); /** Parameter name for HBase WAL directory */ public static final String HBASE_WAL_DIR = "hbase.wal.dir"; @@ -733,7 +733,7 @@ public abstract class CommonFSUtils { * @param LOG log to output information * @throws IOException if an unexpected exception occurs */ - public static void logFileSystemState(final FileSystem fs, final Path root, Log LOG) + public static void logFileSystemState(final FileSystem fs, final Path root, Logger LOG) throws IOException { LOG.debug("File system contents for path " + root); logFSTree(LOG, fs, root, "|-"); @@ -742,9 +742,9 @@ public abstract class CommonFSUtils { /** * Recursive helper to log the state of the FS * - * @see #logFileSystemState(FileSystem, Path, Log) + * @see #logFileSystemState(FileSystem, Path, Logger) */ - private static void logFSTree(Log LOG, final FileSystem fs, final Path root, String prefix) + private static void logFSTree(Logger LOG, final FileSystem fs, final Path root, String prefix) throws IOException { FileStatus[] files = listStatus(fs, root, null); if (files == null) { diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CoprocessorClassLoader.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CoprocessorClassLoader.java index 15828ed8eeb..283d59a17ed 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CoprocessorClassLoader.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CoprocessorClassLoader.java @@ -34,8 +34,6 @@ import java.util.jar.JarFile; import java.util.regex.Matcher; import java.util.regex.Pattern; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -43,6 +41,8 @@ import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.IOUtils; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; import org.apache.hadoop.hbase.shaded.com.google.common.collect.MapMaker; @@ -76,7 +76,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.MapMaker; */ @InterfaceAudience.Private public class CoprocessorClassLoader extends ClassLoaderBase { - private static final Log LOG = LogFactory.getLog(CoprocessorClassLoader.class); + private static final Logger LOG = LoggerFactory.getLogger(CoprocessorClassLoader.class); // A temporary place ${hbase.local.dir}/jars/tmp/ to store the local // copy of the jar file and the libraries contained in the jar. 
@@ -108,7 +108,7 @@ public class CoprocessorClassLoader extends ClassLoaderBase { "org.xml", "sunw.", // logging - "org.apache.commons.logging", + "org.slf4j", "org.apache.log4j", "com.hadoop", // HBase: diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/DynamicClassLoader.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/DynamicClassLoader.java index 75dcf5f0d2e..28fce21b1cb 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/DynamicClassLoader.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/DynamicClassLoader.java @@ -23,13 +23,13 @@ import java.net.MalformedURLException; import java.net.URL; import java.util.HashMap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This is a class loader that can load classes dynamically from new @@ -57,8 +57,8 @@ import org.apache.yetus.audience.InterfaceAudience; */ @InterfaceAudience.Private public class DynamicClassLoader extends ClassLoaderBase { - private static final Log LOG = - LogFactory.getLog(DynamicClassLoader.class); + private static final Logger LOG = + LoggerFactory.getLogger(DynamicClassLoader.class); // Dynamic jars are put under ${hbase.local.dir}/jars/ private static final String DYNAMIC_JARS_DIR = File.separator diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JSONBean.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JSONBean.java index 532f8419b47..ab95b312639 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JSONBean.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JSONBean.java @@ -25,6 +25,7 @@ import java.lang.reflect.Array; import java.nio.charset.StandardCharsets; import java.util.Iterator; import java.util.Set; + import javax.management.AttributeNotFoundException; import javax.management.InstanceNotFoundException; import javax.management.IntrospectionException; @@ -41,17 +42,18 @@ import javax.management.openmbean.CompositeData; import javax.management.openmbean.CompositeType; import javax.management.openmbean.TabularData; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import com.fasterxml.jackson.core.JsonFactory; import com.fasterxml.jackson.core.JsonGenerationException; import com.fasterxml.jackson.core.JsonGenerator; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; /** * Utility for doing JSON and MBeans. 
*/ public class JSONBean { - private static final Log LOG = LogFactory.getLog(JSONBean.class); + private static final Logger LOG = LoggerFactory.getLogger(JSONBean.class); private final JsonFactory jsonFactory; public JSONBean() { diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JSONMetricUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JSONMetricUtil.java index ae967a16e4e..b6c05b67c98 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JSONMetricUtil.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JSONMetricUtil.java @@ -28,6 +28,7 @@ import java.lang.management.RuntimeMXBean; import java.util.Hashtable; import java.util.List; import java.util.Set; + import javax.management.InstanceNotFoundException; import javax.management.MBeanAttributeInfo; import javax.management.MBeanInfo; @@ -37,15 +38,16 @@ import javax.management.ObjectName; import javax.management.ReflectionException; import javax.management.openmbean.CompositeData; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; public final class JSONMetricUtil { - private static final Log LOG = LogFactory.getLog(JSONMetricUtil.class); + private static final Logger LOG = LoggerFactory.getLogger(JSONMetricUtil.class); private static MBeanServer mbServer = ManagementFactory.getPlatformMBeanServer(); //MBeans ObjectName domain names diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JVM.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JVM.java index ab966f17b41..6657481ed02 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JVM.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JVM.java @@ -28,9 +28,9 @@ import java.lang.management.RuntimeMXBean; import java.lang.reflect.Method; import java.nio.charset.StandardCharsets; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** @@ -42,7 +42,7 @@ import org.apache.yetus.audience.InterfaceAudience; @InterfaceAudience.Private public class JVM { - private static final Log LOG = LogFactory.getLog(JVM.class); + private static final Logger LOG = LoggerFactory.getLogger(JVM.class); private OperatingSystemMXBean osMbean; private static final boolean ibmvendor = diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/MD5Hash.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/MD5Hash.java index 804c1cdf453..c38f1a9f8b9 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/MD5Hash.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/MD5Hash.java @@ -23,9 +23,9 @@ import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; import org.apache.commons.codec.binary.Hex; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Utility class for MD5 @@ -33,7 +33,7 @@ import org.apache.yetus.audience.InterfaceAudience; */ @InterfaceAudience.Public public class MD5Hash { - private static final Log LOG = LogFactory.getLog(MD5Hash.class); + private static final 
Logger LOG = LoggerFactory.getLogger(MD5Hash.class); /** * Given a byte array, returns in MD5 hash as a hex string. diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Methods.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Methods.java index 296dc643d2c..6e472a0714e 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Methods.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Methods.java @@ -24,13 +24,14 @@ import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.lang.reflect.UndeclaredThrowableException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.hbase.log.HBaseMarkers; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @InterfaceAudience.Private public class Methods { - private static final Log LOG = LogFactory.getLog(Methods.class); + private static final Logger LOG = LoggerFactory.getLogger(Methods.class); public static Object call(Class clazz, T instance, String methodName, Class[] types, Object[] args) throws Exception { @@ -38,7 +39,7 @@ public class Methods { Method m = clazz.getMethod(methodName, types); return m.invoke(instance, args); } catch (IllegalArgumentException arge) { - LOG.fatal("Constructed invalid call. class="+clazz.getName()+ + LOG.error(HBaseMarkers.FATAL, "Constructed invalid call. class="+clazz.getName()+ " method=" + methodName + " types=" + Classes.stringify(types), arge); throw arge; } catch (NoSuchMethodException nsme) { @@ -59,8 +60,9 @@ public class Methods { throw new IllegalArgumentException( "Denied access calling "+clazz.getName()+"."+methodName+"()", iae); } catch (SecurityException se) { - LOG.fatal("SecurityException calling method. class="+clazz.getName()+ - " method=" + methodName + " types=" + Classes.stringify(types), se); + LOG.error(HBaseMarkers.FATAL, "SecurityException calling method. 
class="+ + clazz.getName()+" method=" + methodName + " types=" + + Classes.stringify(types), se); throw se; } } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/PrettyPrinter.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/PrettyPrinter.java index 4f4b775af0c..147e9160f91 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/PrettyPrinter.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/PrettyPrinter.java @@ -22,16 +22,16 @@ package org.apache.hadoop.hbase.util; import java.util.regex.Matcher; import java.util.regex.Pattern; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.exceptions.HBaseException; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @InterfaceAudience.Private public class PrettyPrinter { - private static final Log LOG = LogFactory.getLog(PrettyPrinter.class); + private static final Logger LOG = LoggerFactory.getLogger(PrettyPrinter.class); private static final String INTERVAL_REGEX = "((\\d+)\\s*SECONDS?\\s*\\()?\\s*" + "((\\d+)\\s*DAYS?)?\\s*((\\d+)\\s*HOURS?)?\\s*" + diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ReflectionUtils.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ReflectionUtils.java index 6430d2eb120..a136846a922 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ReflectionUtils.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ReflectionUtils.java @@ -29,9 +29,10 @@ import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.nio.charset.Charset; -import edu.umd.cs.findbugs.annotations.NonNull; -import org.apache.commons.logging.Log; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; + +import edu.umd.cs.findbugs.annotations.NonNull; @InterfaceAudience.Private public class ReflectionUtils { @@ -111,7 +112,7 @@ public class ReflectionUtils { * @param title a descriptive title for the call stacks * @param minInterval the minimum time from the last */ - public static void logThreadInfo(Log log, + public static void logThreadInfo(Logger log, String title, long minInterval) { boolean dumpStack = false; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/RetryCounter.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/RetryCounter.java index 116b6cce7d9..ffcd9cab33d 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/RetryCounter.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/RetryCounter.java @@ -20,9 +20,9 @@ package org.apache.hadoop.hbase.util; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @InterfaceAudience.Private public class RetryCounter { @@ -127,7 +127,7 @@ public class RetryCounter { } } - private static final Log LOG = LogFactory.getLog(RetryCounter.class); + private static final Logger LOG = LoggerFactory.getLogger(RetryCounter.class); private RetryConfig retryConfig; private int attempts; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Sleeper.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Sleeper.java index 326da2e69c0..7d4d692e1a1 100644 --- 
a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Sleeper.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Sleeper.java @@ -18,10 +18,10 @@ */ package org.apache.hadoop.hbase.util; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Stoppable; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Sleeper for current thread. @@ -31,7 +31,7 @@ import org.apache.yetus.audience.InterfaceAudience; */ @InterfaceAudience.Private public class Sleeper { - private static final Log LOG = LogFactory.getLog(Sleeper.class); + private static final Logger LOG = LoggerFactory.getLogger(Sleeper.class); private final int period; private final Stoppable stopper; private static final long MINIMAL_DELTA_FOR_LOGGING = 10000; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Threads.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Threads.java index 4e2f09f611d..5f64f6329d3 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Threads.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Threads.java @@ -31,11 +31,11 @@ import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.util.ReflectionUtils; import org.apache.hadoop.util.StringUtils; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; @@ -44,7 +44,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; */ @InterfaceAudience.Private public class Threads { - private static final Log LOG = LogFactory.getLog(Threads.class); + private static final Logger LOG = LoggerFactory.getLogger(Threads.class); private static final AtomicInteger poolNumber = new AtomicInteger(1); public static final UncaughtExceptionHandler LOGGING_EXCEPTION_HANDLER = diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/UnsafeAccess.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/UnsafeAccess.java index 50fef6d823e..feaa9e66242 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/UnsafeAccess.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/UnsafeAccess.java @@ -23,10 +23,11 @@ import java.nio.ByteOrder; import java.security.AccessController; import java.security.PrivilegedAction; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceStability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import sun.misc.Unsafe; import sun.nio.ch.DirectBuffer; @@ -34,7 +35,7 @@ import sun.nio.ch.DirectBuffer; @InterfaceStability.Evolving public final class UnsafeAccess { - private static final Log LOG = LogFactory.getLog(UnsafeAccess.class); + private static final Logger LOG = LoggerFactory.getLogger(UnsafeAccess.class); static final Unsafe theUnsafe; @@ -325,7 +326,7 @@ public final class UnsafeAccess { destAddress = destAddress + BYTE_ARRAY_BASE_OFFSET + dest.arrayOffset(); destBase = dest.array(); } - long srcAddress = (long) srcOffset + BYTE_ARRAY_BASE_OFFSET; + long srcAddress = srcOffset + BYTE_ARRAY_BASE_OFFSET; unsafeCopy(src, srcAddress, 
destBase, destAddress, length); } @@ -359,7 +360,7 @@ public final class UnsafeAccess { srcAddress = srcAddress + BYTE_ARRAY_BASE_OFFSET + src.arrayOffset(); srcBase = src.array(); } - long destAddress = (long) destOffset + BYTE_ARRAY_BASE_OFFSET; + long destAddress = destOffset + BYTE_ARRAY_BASE_OFFSET; unsafeCopy(srcBase, srcAddress, dest, destAddress, length); } @@ -386,7 +387,7 @@ public final class UnsafeAccess { if (dest.isDirect()) { destAddress = destOffset + ((DirectBuffer) dest).address(); } else { - destAddress = (long) destOffset + BYTE_ARRAY_BASE_OFFSET + dest.arrayOffset(); + destAddress = destOffset + BYTE_ARRAY_BASE_OFFSET + dest.arrayOffset(); destBase = dest.array(); } unsafeCopy(srcBase, srcAddress, destBase, destAddress, length); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/UnsafeAvailChecker.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/UnsafeAvailChecker.java index 8fe70443ae9..88dd5242961 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/UnsafeAvailChecker.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/UnsafeAvailChecker.java @@ -22,15 +22,15 @@ import java.lang.reflect.Method; import java.security.AccessController; import java.security.PrivilegedAction; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @InterfaceAudience.Private public class UnsafeAvailChecker { private static final String CLASS_NAME = "sun.misc.Unsafe"; - private static final Log LOG = LogFactory.getLog(UnsafeAvailChecker.class); + private static final Logger LOG = LoggerFactory.getLogger(UnsafeAvailChecker.class); private static boolean avail = false; private static boolean unaligned = false; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/VersionInfo.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/VersionInfo.java index 07b9c5880a0..9d9b5630629 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/VersionInfo.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/VersionInfo.java @@ -21,17 +21,17 @@ package org.apache.hadoop.hbase.util; import java.io.PrintStream; import java.io.PrintWriter; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Version; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This class finds the Version information for HBase. */ @InterfaceAudience.Public public class VersionInfo { - private static final Log LOG = LogFactory.getLog(VersionInfo.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(VersionInfo.class.getName()); // If between two dots there is not a number, we regard it as a very large number so it is // higher than any numbers in the version. 
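
The hunks above repeat one mechanical substitution: drop the commons-logging Log/LogFactory pair, import org.slf4j.Logger and org.slf4j.LoggerFactory, and rebuild the static logger field with LoggerFactory.getLogger. Because SLF4J has no fatal level, call sites such as Methods.java route the old LOG.fatal(...) through LOG.error with the HBaseMarkers.FATAL marker instead. A minimal sketch of the resulting pattern follows; the class name ExampleComponent and the failHard method are hypothetical stand-ins, while the Logger, LoggerFactory and HBaseMarkers usage is taken directly from the patch:

    import org.apache.hadoop.hbase.log.HBaseMarkers;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class ExampleComponent {              // hypothetical class, for illustration only
      // was: private static final Log LOG = LogFactory.getLog(ExampleComponent.class);
      private static final Logger LOG = LoggerFactory.getLogger(ExampleComponent.class);

      void failHard(Exception e) {
        // was: LOG.fatal("Constructed invalid call", e); SLF4J has no fatal level,
        // so the patch routes it through error() with the FATAL marker, as in Methods.java above.
        LOG.error(HBaseMarkers.FATAL, "Constructed invalid call", e);
      }
    }

The same swap reaches public signatures too: ReflectionUtils.logThreadInfo now takes an org.slf4j.Logger rather than a commons-logging Log, so its callers have to migrate with it. Separately, the UnsafeAccess hunks drop a few explicit (long) casts on the offset arithmetic; assuming BYTE_ARRAY_BASE_OFFSET is already declared as a long, the addition promotes to long on its own and the casts were redundant.
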
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/ClassFinder.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/ClassFinder.java index cc0e9691d7e..9dccb291fb6 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/ClassFinder.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/ClassFinder.java @@ -33,8 +33,8 @@ import java.util.jar.JarInputStream; import java.util.regex.Matcher; import java.util.regex.Pattern; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A class that finds a set of classes that are locally accessible @@ -42,7 +42,7 @@ import org.apache.commons.logging.LogFactory; * imposed by name and class filters provided by the user. */ public class ClassFinder { - private static final Log LOG = LogFactory.getLog(ClassFinder.class); + private static final Logger LOG = LoggerFactory.getLogger(ClassFinder.class); private static String CLASS_EXT = ".class"; private ResourcePathFilter resourcePathFilter; diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java index d84e8ec5c45..693f9b2f74e 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java @@ -25,13 +25,13 @@ import java.util.List; import java.util.UUID; import org.apache.commons.io.FileUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Waiter.Predicate; import org.apache.hadoop.hbase.io.compress.Compression; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Common helpers for testing HBase that do not depend on specific server/etc. things. 
@@ -39,7 +39,7 @@ import org.apache.yetus.audience.InterfaceAudience; */ @InterfaceAudience.Public public class HBaseCommonTestingUtility { - protected static final Log LOG = LogFactory.getLog(HBaseCommonTestingUtility.class); + protected static final Logger LOG = LoggerFactory.getLogger(HBaseCommonTestingUtility.class); /** Compression algorithms to use in parameterized JUnit 4 tests */ public static final List COMPRESSION_ALGORITHMS_PARAMETERIZED = diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/ResourceChecker.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/ResourceChecker.java index c0b98362c53..b42db95cfcd 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/ResourceChecker.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/ResourceChecker.java @@ -22,8 +22,8 @@ package org.apache.hadoop.hbase; import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Utility class to check the resources: @@ -32,7 +32,7 @@ import org.apache.commons.logging.LogFactory; * - check that they don't leak during the test */ public class ResourceChecker { - private static final Log LOG = LogFactory.getLog(ResourceChecker.class); + private static final Logger LOG = LoggerFactory.getLogger(ResourceChecker.class); private String tagLine; enum Phase { diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestChoreService.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestChoreService.java index 7d85b97cad8..a28bad865da 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestChoreService.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestChoreService.java @@ -24,8 +24,6 @@ import static org.junit.Assert.assertTrue; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.TestChoreService.ScheduledChoreSamples.CountingChore; import org.apache.hadoop.hbase.TestChoreService.ScheduledChoreSamples.DoNothingChore; import org.apache.hadoop.hbase.TestChoreService.ScheduledChoreSamples.FailInitialChore; @@ -35,10 +33,12 @@ import org.apache.hadoop.hbase.TestChoreService.ScheduledChoreSamples.SlowChore; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category(SmallTests.class) public class TestChoreService { - public static final Log log = LogFactory.getLog(TestChoreService.class); + public static final Logger log = LoggerFactory.getLogger(TestChoreService.class); /** * A few ScheduledChore samples that are useful for testing with ChoreService diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestClassFinder.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestClassFinder.java index d37d7311616..e9c3e604a50 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestClassFinder.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestClassFinder.java @@ -41,8 +41,6 @@ import java.util.jar.Manifest; import javax.tools.JavaCompiler; import javax.tools.ToolProvider; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.junit.AfterClass; @@ 
-51,11 +49,13 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({MiscTests.class, SmallTests.class}) public class TestClassFinder { - private static final Log LOG = LogFactory.getLog(TestClassFinder.class); + private static final Logger LOG = LoggerFactory.getLogger(TestClassFinder.class); @Rule public TestName name = new TestName(); private static final HBaseCommonTestingUtility testUtil = new HBaseCommonTestingUtility(); diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestHBaseConfiguration.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestHBaseConfiguration.java index abb6a2825a6..309480ab54d 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestHBaseConfiguration.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestHBaseConfiguration.java @@ -28,21 +28,20 @@ import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.junit.AfterClass; import org.junit.Test; import org.junit.experimental.categories.Category; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableMap; @Category({MiscTests.class, SmallTests.class}) public class TestHBaseConfiguration { - private static final Log LOG = LogFactory.getLog(TestHBaseConfiguration.class); + private static final Logger LOG = LoggerFactory.getLogger(TestHBaseConfiguration.class); private static HBaseCommonTestingUtility UTIL = new HBaseCommonTestingUtility(); diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java index c6b726527b3..085f357d64d 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java @@ -31,15 +31,16 @@ import java.util.List; import java.util.Set; import java.util.TreeSet; -import junit.framework.TestCase; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.KeyValue.Type; import org.apache.hadoop.hbase.util.ByteBufferUtils; import org.apache.hadoop.hbase.util.Bytes; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import junit.framework.TestCase; public class TestKeyValue extends TestCase { - private static final Log LOG = LogFactory.getLog(TestKeyValue.class); + private static final Logger LOG = LoggerFactory.getLogger(TestKeyValue.class); public void testColumnCompare() throws Exception { final byte [] a = Bytes.toBytes("aaa"); diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/Waiter.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/Waiter.java index 6ee52cb5c94..70750056b13 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/Waiter.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/Waiter.java @@ -23,10 +23,10 @@ import static org.junit.Assert.fail; import java.text.MessageFormat; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import 
org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A class that provides a standard waitFor pattern @@ -35,7 +35,7 @@ import org.apache.yetus.audience.InterfaceAudience; @InterfaceAudience.Private public final class Waiter { - private static final Log LOG = LogFactory.getLog(Waiter.class); + private static final Logger LOG = LoggerFactory.getLogger(Waiter.class); /** * System property name whose value is a scale factor to increase time out values dynamically used diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestEncryption.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestEncryption.java index 07dd601d277..8ae20d5cd92 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestEncryption.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestEncryption.java @@ -24,8 +24,6 @@ import java.io.ByteArrayOutputStream; import java.security.Key; import javax.crypto.spec.SecretKeySpec; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HConstants; @@ -34,11 +32,13 @@ import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({MiscTests.class, SmallTests.class}) public class TestEncryption { - private static final Log LOG = LogFactory.getLog(TestEncryption.class); + private static final Logger LOG = LoggerFactory.getLogger(TestEncryption.class); @Test public void testSmallBlocks() throws Exception { diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyStoreKeyProvider.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyStoreKeyProvider.java index 790568e26aa..2fe8a8f6d62 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyStoreKeyProvider.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyStoreKeyProvider.java @@ -29,19 +29,19 @@ import java.security.MessageDigest; import java.util.Properties; import javax.crypto.spec.SecretKeySpec; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HBaseCommonTestingUtility; import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({MiscTests.class, SmallTests.class}) public class TestKeyStoreKeyProvider { - private static final Log LOG = LogFactory.getLog(TestKeyStoreKeyProvider.class); + private static final Logger LOG = LoggerFactory.getLogger(TestKeyStoreKeyProvider.class); static final HBaseCommonTestingUtility TEST_UTIL = new HBaseCommonTestingUtility(); static final String ALIAS = "test"; static final String PASSWORD = "password"; diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/ClassLoaderTestHelper.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/ClassLoaderTestHelper.java index 17471bb64db..0705a365710 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/ClassLoaderTestHelper.java +++ 
b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/ClassLoaderTestHelper.java @@ -35,16 +35,16 @@ import javax.tools.JavaFileObject; import javax.tools.StandardJavaFileManager; import javax.tools.ToolProvider; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Some utilities to help class loader testing */ public class ClassLoaderTestHelper { - private static final Log LOG = LogFactory.getLog(ClassLoaderTestHelper.class); + private static final Logger LOG = LoggerFactory.getLogger(ClassLoaderTestHelper.class); private static final int BUFFER_SIZE = 4096; diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/LoadTestKVGenerator.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/LoadTestKVGenerator.java index e46e89b76e8..ad49e553719 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/LoadTestKVGenerator.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/LoadTestKVGenerator.java @@ -18,9 +18,9 @@ package org.apache.hadoop.hbase.util; import java.util.Random; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A generator of random keys and values for load testing. Keys are generated @@ -32,7 +32,7 @@ import org.apache.yetus.audience.InterfaceAudience; @InterfaceAudience.Private public class LoadTestKVGenerator { - private static final Log LOG = LogFactory.getLog(LoadTestKVGenerator.class); + private static final Logger LOG = LoggerFactory.getLogger(LoadTestKVGenerator.class); private static int logLimit = 10; /** A random number generator for determining value size */ diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestCommonFSUtils.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestCommonFSUtils.java index 7dd27d48048..a3330af1341 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestCommonFSUtils.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestCommonFSUtils.java @@ -26,8 +26,6 @@ import static org.junit.Assert.assertTrue; import java.io.ByteArrayOutputStream; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; @@ -38,13 +36,15 @@ import org.apache.hadoop.hbase.testclassification.MiscTests; import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Test {@link CommonFSUtils}. 
*/ @Category({MiscTests.class, MediumTests.class}) public class TestCommonFSUtils { - private static final Log LOG = LogFactory.getLog(TestCommonFSUtils.class); + private static final Logger LOG = LoggerFactory.getLogger(TestCommonFSUtils.class); private HBaseCommonTestingUtility htu; private Configuration conf; diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestDynamicClassLoader.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestDynamicClassLoader.java index 5bdb668a2d2..5b80cd8fea1 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestDynamicClassLoader.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestDynamicClassLoader.java @@ -23,8 +23,6 @@ import static org.junit.Assert.fail; import java.io.File; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseCommonTestingUtility; import org.apache.hadoop.hbase.testclassification.MiscTests; @@ -32,13 +30,15 @@ import org.apache.hadoop.hbase.testclassification.SmallTests; import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Test TestDynamicClassLoader */ @Category({MiscTests.class, SmallTests.class}) public class TestDynamicClassLoader { - private static final Log LOG = LogFactory.getLog(TestDynamicClassLoader.class); + private static final Logger LOG = LoggerFactory.getLogger(TestDynamicClassLoader.class); private static final HBaseCommonTestingUtility TEST_UTIL = new HBaseCommonTestingUtility(); private Configuration conf; diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestShowProperties.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestShowProperties.java index 2f16ee8f7fa..6b6d9b072bf 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestShowProperties.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestShowProperties.java @@ -20,12 +20,12 @@ package org.apache.hadoop.hbase.util; import java.util.Properties; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** @@ -34,7 +34,7 @@ import org.junit.experimental.categories.Category; */ @Category({MiscTests.class, SmallTests.class}) public class TestShowProperties { - private static final Log LOG = LogFactory.getLog(TestShowProperties.class); + private static final Logger LOG = LoggerFactory.getLogger(TestShowProperties.class); @Test public void testShowProperty() { diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestThreads.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestThreads.java index a628e98ee91..e6a06c1c7b7 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestThreads.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestThreads.java @@ -22,16 +22,16 @@ import static org.junit.Assert.assertTrue; import java.util.concurrent.atomic.AtomicBoolean; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; 
import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({MiscTests.class, SmallTests.class}) public class TestThreads { - private static final Log LOG = LogFactory.getLog(TestThreads.class); + private static final Logger LOG = LoggerFactory.getLogger(TestThreads.class); private static final int SLEEP_TIME_MS = 3000; private static final int TOLERANCE_MS = (int) (0.10 * SLEEP_TIME_MS); diff --git a/hbase-endpoint/pom.xml b/hbase-endpoint/pom.xml index b92a7922ba5..9ded35b6bc3 100644 --- a/hbase-endpoint/pom.xml +++ b/hbase-endpoint/pom.xml @@ -208,8 +208,8 @@ - commons-logging - commons-logging + org.slf4j + slf4j-api org.apache.curator diff --git a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java index 4535cab1294..7b071f412cc 100644 --- a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java +++ b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java @@ -38,13 +38,13 @@ import java.util.NavigableSet; import java.util.TreeMap; import java.util.concurrent.atomic.AtomicLong; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.TableName; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; import org.apache.hadoop.hbase.client.Result; @@ -83,7 +83,7 @@ import org.apache.hadoop.hbase.util.Pair; @InterfaceAudience.Public public class AggregationClient implements Closeable { // TODO: This class is not used. Move to examples? 
- private static final Log log = LogFactory.getLog(AggregationClient.class); + private static final Logger log = LoggerFactory.getLogger(AggregationClient.class); private final Connection connection; /** diff --git a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java index 6e0e6d447a9..6beb3f66fda 100644 --- a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java +++ b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java @@ -33,11 +33,11 @@ import java.util.Collections; import java.util.List; import java.util.NavigableSet; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CoprocessorEnvironment; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter; import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils; @@ -62,7 +62,7 @@ import org.apache.hadoop.hbase.regionserver.InternalScanner; @InterfaceAudience.Private public class AggregateImplementation extends AggregateService implements RegionCoprocessor { - protected static final Log log = LogFactory.getLog(AggregateImplementation.class); + protected static final Logger log = LoggerFactory.getLogger(AggregateImplementation.class); private RegionCoprocessorEnvironment env; /** diff --git a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/Export.java b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/Export.java index 66e9e044f6e..f642d610b71 100644 --- a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/Export.java +++ b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/Export.java @@ -29,8 +29,6 @@ import java.util.Map; import java.util.TreeMap; import org.apache.commons.lang3.ArrayUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -75,7 +73,8 @@ import org.apache.hadoop.util.GenericOptionsParser; import org.apache.hadoop.util.ReflectionUtils; import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceStability; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import com.google.protobuf.RpcCallback; @@ -93,7 +92,7 @@ import com.google.protobuf.Service; @InterfaceStability.Evolving public class Export extends ExportProtos.ExportService implements RegionCoprocessor { - private static final Log LOG = LogFactory.getLog(Export.class); + private static final Logger LOG = LoggerFactory.getLogger(Export.class); private static final Class DEFAULT_CODEC = DefaultCodec.class; private static final SequenceFile.CompressionType DEFAULT_TYPE = SequenceFile.CompressionType.RECORD; private RegionCoprocessorEnvironment env = null; @@ -341,7 +340,7 @@ public class Export extends ExportProtos.ExportService implements RegionCoproces done.run(response); } catch (IOException e) { CoprocessorRpcUtils.setControllerException(controller, e); - LOG.error(e); + LOG.error(e.toString(), e); } } diff --git 
a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.java b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.java index 69d8491f9ae..18c932e22a6 100644 --- a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.java +++ b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.java @@ -23,8 +23,6 @@ import java.util.Collections; import java.util.List; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.CoprocessorEnvironment; import org.apache.hadoop.hbase.coprocessor.CoreCoprocessor; @@ -51,6 +49,8 @@ import com.google.protobuf.RpcCallback; import com.google.protobuf.RpcController; import com.google.protobuf.Service; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Coprocessor service for bulk loads in secure mode. @@ -63,7 +63,7 @@ public class SecureBulkLoadEndpoint extends SecureBulkLoadService implements Reg public static final long VERSION = 0L; - private static final Log LOG = LogFactory.getLog(SecureBulkLoadEndpoint.class); + private static final Logger LOG = LoggerFactory.getLogger(SecureBulkLoadEndpoint.class); private RegionCoprocessorEnvironment env; private RegionServerServices rsServices; diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpoint.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpoint.java index cfcb565cc3f..fd570e7854c 100644 --- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpoint.java +++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpoint.java @@ -23,8 +23,6 @@ import java.util.ArrayList; import java.util.Collections; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.CoprocessorEnvironment; @@ -35,6 +33,8 @@ import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationP import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils; import org.apache.hadoop.hbase.regionserver.InternalScanner; import org.apache.hadoop.hbase.util.Bytes; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import com.google.protobuf.RpcCallback; import com.google.protobuf.RpcController; @@ -45,7 +45,7 @@ import com.google.protobuf.Service; */ public class ColumnAggregationEndpoint extends ColumnAggregationService implements RegionCoprocessor { - private static final Log LOG = LogFactory.getLog(ColumnAggregationEndpoint.class); + private static final Logger LOG = LoggerFactory.getLogger(ColumnAggregationEndpoint.class); private RegionCoprocessorEnvironment env = null; @Override diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointNullResponse.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointNullResponse.java index 80316d3984a..5effbe9fa40 100644 --- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointNullResponse.java +++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointNullResponse.java @@ -22,8 +22,6 @@ 
import java.util.ArrayList; import java.util.Collections; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.CoprocessorEnvironment; @@ -36,6 +34,8 @@ import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationW import org.apache.hadoop.hbase.regionserver.InternalScanner; import org.apache.hadoop.hbase.regionserver.Region; import org.apache.hadoop.hbase.util.Bytes; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import com.google.protobuf.RpcCallback; import com.google.protobuf.RpcController; @@ -48,7 +48,9 @@ import com.google.protobuf.Service; */ public class ColumnAggregationEndpointNullResponse extends ColumnAggregationServiceNullResponse implements RegionCoprocessor { - private static final Log LOG = LogFactory.getLog(ColumnAggregationEndpointNullResponse.class); + private static final Logger LOG = + LoggerFactory.getLogger(ColumnAggregationEndpointNullResponse.class); + private RegionCoprocessorEnvironment env = null; @Override public Iterable getServices() { diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointWithErrors.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointWithErrors.java index 49b79ce51f6..39e3b12cd21 100644 --- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointWithErrors.java +++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointWithErrors.java @@ -22,8 +22,6 @@ import java.util.ArrayList; import java.util.Collections; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.CoprocessorEnvironment; @@ -37,6 +35,8 @@ import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils; import org.apache.hadoop.hbase.regionserver.InternalScanner; import org.apache.hadoop.hbase.regionserver.Region; import org.apache.hadoop.hbase.util.Bytes; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import com.google.protobuf.RpcCallback; import com.google.protobuf.RpcController; @@ -50,7 +50,9 @@ import com.google.protobuf.Service; public class ColumnAggregationEndpointWithErrors extends ColumnAggregationWithErrorsProtos.ColumnAggregationServiceWithErrors implements RegionCoprocessor { - private static final Log LOG = LogFactory.getLog(ColumnAggregationEndpointWithErrors.class); + private static final Logger LOG = + LoggerFactory.getLogger(ColumnAggregationEndpointWithErrors.class); + private RegionCoprocessorEnvironment env = null; @Override diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestBatchCoprocessorEndpoint.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestBatchCoprocessorEndpoint.java index eecf7a3f0cb..5433792986d 100644 --- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestBatchCoprocessorEndpoint.java +++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestBatchCoprocessorEndpoint.java @@ -25,8 +25,6 @@ import java.util.Collections; import java.util.Map; import java.util.TreeMap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import 
org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HColumnDescriptor; @@ -51,6 +49,8 @@ import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import com.google.protobuf.ByteString; import com.google.protobuf.ServiceException; @@ -60,7 +60,7 @@ import com.google.protobuf.ServiceException; */ @Category({CoprocessorTests.class, MediumTests.class}) public class TestBatchCoprocessorEndpoint { - private static final Log LOG = LogFactory.getLog(TestBatchCoprocessorEndpoint.class); + private static final Logger LOG = LoggerFactory.getLogger(TestBatchCoprocessorEndpoint.class); private static final TableName TEST_TABLE = TableName.valueOf("TestTable"); diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestClassLoading.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestClassLoading.java index e52e0328239..37e5a78cb12 100644 --- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestClassLoading.java +++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestClassLoading.java @@ -18,8 +18,6 @@ */ package org.apache.hadoop.hbase.coprocessor; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Coprocessor; import org.apache.hadoop.hbase.CoprocessorEnvironment; @@ -48,6 +46,8 @@ import java.util.*; import org.junit.*; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; @@ -59,7 +59,7 @@ import static org.junit.Assert.assertFalse; */ @Category({CoprocessorTests.class, MediumTests.class}) public class TestClassLoading { - private static final Log LOG = LogFactory.getLog(TestClassLoading.class); + private static final Logger LOG = LoggerFactory.getLogger(TestClassLoading.class); private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); public static class TestMasterCoprocessor implements MasterCoprocessor, MasterObserver { diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorEndpoint.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorEndpoint.java index adfd8d53e5e..376c0715866 100644 --- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorEndpoint.java +++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorEndpoint.java @@ -30,8 +30,6 @@ import java.util.List; import java.util.Map; import java.util.TreeMap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HColumnDescriptor; @@ -59,6 +57,8 @@ import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import com.google.protobuf.RpcController; import com.google.protobuf.ServiceException; @@ -68,7 +68,7 @@ import com.google.protobuf.ServiceException; */ @Category({CoprocessorTests.class, MediumTests.class}) public class TestCoprocessorEndpoint { - private static final Log LOG = 
LogFactory.getLog(TestCoprocessorEndpoint.class); + private static final Logger LOG = LoggerFactory.getLogger(TestCoprocessorEndpoint.class); private static final TableName TEST_TABLE = TableName.valueOf("TestCoprocessorEndpoint"); diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRowProcessorEndpoint.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRowProcessorEndpoint.java index 8c111926770..8a79400bcb4 100644 --- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRowProcessorEndpoint.java +++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRowProcessorEndpoint.java @@ -74,10 +74,10 @@ import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import com.google.protobuf.Message; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; /** * Verifies ProcessEndpoint works. @@ -86,7 +86,7 @@ import org.apache.commons.logging.LogFactory; @Category({CoprocessorTests.class, MediumTests.class}) public class TestRowProcessorEndpoint { - private static final Log LOG = LogFactory.getLog(TestRowProcessorEndpoint.class); + private static final Logger LOG = LoggerFactory.getLogger(TestRowProcessorEndpoint.class); private static final TableName TABLE = TableName.valueOf("testtable"); private final static byte[] ROW = Bytes.toBytes("testrow"); diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestSecureExport.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestSecureExport.java index e4cd54d5811..76ef82504ea 100644 --- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestSecureExport.java +++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestSecureExport.java @@ -27,8 +27,7 @@ import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Properties; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; + import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -82,10 +81,12 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({MediumTests.class}) public class TestSecureExport { - private static final Log LOG = LogFactory.getLog(TestSecureExport.class); + private static final Logger LOG = LoggerFactory.getLogger(TestSecureExport.class); private static final HBaseTestingUtility UTIL = new HBaseTestingUtility(); private static MiniKdc KDC; private static final File KEYTAB_FILE = new File(UTIL.getDataTestDir("keytab").toUri().getPath()); @@ -311,7 +312,7 @@ public class TestSecureExport { } catch (ServiceException | IOException ex) { throw ex; } catch (Throwable ex) { - LOG.error(ex); + LOG.error(ex.toString(), ex); throw new Exception(ex); } finally { clearOutput(output); diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoadWithOldSecureEndpoint.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoadWithOldSecureEndpoint.java index 25953bc2a92..2daacde5a0f 100644 --- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoadWithOldSecureEndpoint.java +++ 
b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoadWithOldSecureEndpoint.java @@ -22,8 +22,6 @@ import java.util.ArrayList; import java.util.List; import java.util.concurrent.atomic.AtomicLong; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HConstants; @@ -49,7 +47,8 @@ import org.junit.Ignore; import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; /** @@ -64,8 +63,8 @@ public class TestHRegionServerBulkLoadWithOldSecureEndpoint extends TestHRegionS super(duration); } - private static final Log LOG = - LogFactory.getLog(TestHRegionServerBulkLoadWithOldSecureEndpoint.class); + private static final Logger LOG = + LoggerFactory.getLogger(TestHRegionServerBulkLoadWithOldSecureEndpoint.class); @BeforeClass public static void setUpBeforeClass() throws IOException { diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/regionserver/TestServerCustomProtocol.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/regionserver/TestServerCustomProtocol.java index bad01f3eae7..0b17abf0dc1 100644 --- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/regionserver/TestServerCustomProtocol.java +++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/regionserver/TestServerCustomProtocol.java @@ -26,8 +26,6 @@ import java.io.IOException; import java.util.Collections; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.CoprocessorEnvironment; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HRegionLocation; @@ -61,6 +59,8 @@ import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import com.google.protobuf.RpcCallback; import com.google.protobuf.RpcController; @@ -69,7 +69,7 @@ import com.google.protobuf.ServiceException; @Category({RegionServerTests.class, MediumTests.class}) public class TestServerCustomProtocol { - private static final Log LOG = LogFactory.getLog(TestServerCustomProtocol.class); + private static final Logger LOG = LoggerFactory.getLogger(TestServerCustomProtocol.class); static final String WHOAREYOU = "Who are you?"; static final String NOBODY = "nobody"; static final String HELLO = "Hello, "; diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSyncUpToolWithBulkLoadedData.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSyncUpToolWithBulkLoadedData.java index 0b33d20e7d5..1b74b7d7245 100644 --- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSyncUpToolWithBulkLoadedData.java +++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSyncUpToolWithBulkLoadedData.java @@ -21,8 +21,6 @@ import java.util.List; import java.util.Set; import java.util.UUID; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HConstants; @@ -37,12 +35,14 @@ import 
org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.HFileTestUtil; import org.junit.BeforeClass; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({ ReplicationTests.class, LargeTests.class }) public class TestReplicationSyncUpToolWithBulkLoadedData extends TestReplicationSyncUpTool { - private static final Log LOG = LogFactory - .getLog(TestReplicationSyncUpToolWithBulkLoadedData.class); + private static final Logger LOG = LoggerFactory + .getLogger(TestReplicationSyncUpToolWithBulkLoadedData.class); @BeforeClass public static void setUpBeforeClass() throws Exception { diff --git a/hbase-examples/pom.xml b/hbase-examples/pom.xml index 19556e496c4..63a0577bf78 100644 --- a/hbase-examples/pom.xml +++ b/hbase-examples/pom.xml @@ -158,8 +158,8 @@ commons-io - commons-logging - commons-logging + org.slf4j + slf4j-api org.apache.zookeeper diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/AsyncClientExample.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/AsyncClientExample.java index 67aba62fc6c..63d00fb27e7 100644 --- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/AsyncClientExample.java +++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/AsyncClientExample.java @@ -25,8 +25,6 @@ import java.util.concurrent.atomic.AtomicReference; import java.util.stream.IntStream; import org.apache.commons.io.IOUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.AsyncConnection; @@ -38,13 +36,15 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Threads; import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A simple example shows how to use asynchronous client. */ public class AsyncClientExample extends Configured implements Tool { - private static final Log LOG = LogFactory.getLog(AsyncClientExample.class); + private static final Logger LOG = LoggerFactory.getLogger(AsyncClientExample.class); /** * The size for thread pool. 
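
A second recurring change, visible in Export.java and TestSecureExport.java above and in BulkDeleteEndpoint.java further down, is LOG.error(e) becoming LOG.error(e.toString(), e). commons-logging's error(Object) accepts a Throwable directly as the message, but SLF4J's error(...) overloads start with a String, so the conversion passes the throwable's string form as the message and the throwable itself as the second argument to keep the stack trace. The pom.xml hunks make the matching dependency swap from commons-logging to slf4j-api. A hedged sketch of the logging pattern, where ExportErrorLogging, runExport and the Callable argument are illustrative and not part of the patch:

    import java.util.concurrent.Callable;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class ExportErrorLogging {            // hypothetical class, for illustration only
      private static final Logger LOG = LoggerFactory.getLogger(ExportErrorLogging.class);

      void runExport(Callable<Void> work) {
        try {
          work.call();                           // stand-in for the real export call
        } catch (Exception e) {
          // commons-logging accepted LOG.error(e); SLF4J's error(...) wants a String
          // message first, so pass e.toString() as the message and e for the stack trace.
          LOG.error(e.toString(), e);
        }
      }
    }

Where a fixed message is available, LOG.error("export failed", e) or the parameterized form LOG.error("export of {} failed", path, e) would arguably be more idiomatic SLF4J, while this patch keeps the mechanical e.toString() form.
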
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/BufferedMutatorExample.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/BufferedMutatorExample.java index 7b11684b4e3..8e8a8288701 100644 --- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/BufferedMutatorExample.java +++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/BufferedMutatorExample.java @@ -18,20 +18,6 @@ */ package org.apache.hadoop.hbase.client.example; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.conf.Configured; -import org.apache.hadoop.hbase.TableName; -import org.apache.hadoop.hbase.client.BufferedMutator; -import org.apache.hadoop.hbase.client.BufferedMutatorParams; -import org.apache.hadoop.hbase.client.Connection; -import org.apache.hadoop.hbase.client.ConnectionFactory; -import org.apache.hadoop.hbase.client.Put; -import org.apache.hadoop.hbase.client.RetriesExhaustedWithDetailsException; -import org.apache.hadoop.hbase.util.Bytes; -import org.apache.hadoop.util.Tool; -import org.apache.hadoop.util.ToolRunner; - import java.io.IOException; import java.util.ArrayList; import java.util.List; @@ -43,12 +29,26 @@ import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; +import org.apache.hadoop.conf.Configured; +import org.apache.hadoop.hbase.TableName; +import org.apache.hadoop.hbase.client.BufferedMutator; +import org.apache.hadoop.hbase.client.BufferedMutatorParams; +import org.apache.hadoop.hbase.client.Connection; +import org.apache.hadoop.hbase.client.ConnectionFactory; +import org.apache.hadoop.hbase.client.Put; +import org.apache.hadoop.hbase.client.RetriesExhaustedWithDetailsException; +import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.util.Tool; +import org.apache.hadoop.util.ToolRunner; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + /** * An example of using the {@link BufferedMutator} interface. 
*/ public class BufferedMutatorExample extends Configured implements Tool { - private static final Log LOG = LogFactory.getLog(BufferedMutatorExample.class); + private static final Logger LOG = LoggerFactory.getLogger(BufferedMutatorExample.class); private static final int POOL_SIZE = 10; private static final int TASK_COUNT = 100; diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/MultiThreadedClientExample.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/MultiThreadedClientExample.java index e460316515b..53472bb8d14 100644 --- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/MultiThreadedClientExample.java +++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/MultiThreadedClientExample.java @@ -18,7 +18,6 @@ */ package org.apache.hadoop.hbase.client.example; -import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder; import java.io.IOException; import java.util.ArrayList; import java.util.List; @@ -31,8 +30,6 @@ import java.util.concurrent.ThreadFactory; import java.util.concurrent.ThreadLocalRandom; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.hbase.Cell.DataType; import org.apache.hadoop.hbase.CellBuilderFactory; @@ -50,7 +47,10 @@ import org.apache.hadoop.hbase.filter.KeyOnlyFilter; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder; /** * Example on how to use HBase's {@link Connection} and {@link Table} in a @@ -90,7 +90,7 @@ import org.apache.hadoop.util.ToolRunner; * */ public class MultiThreadedClientExample extends Configured implements Tool { - private static final Log LOG = LogFactory.getLog(MultiThreadedClientExample.class); + private static final Logger LOG = LoggerFactory.getLogger(MultiThreadedClientExample.class); private static final int DEFAULT_NUM_OPERATIONS = 500000; /** diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/RefreshHFilesClient.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/RefreshHFilesClient.java index 0401959b68f..a829b2ab1b1 100644 --- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/RefreshHFilesClient.java +++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/RefreshHFilesClient.java @@ -19,8 +19,6 @@ package org.apache.hadoop.hbase.client.example; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.TableName; @@ -31,6 +29,8 @@ import org.apache.hadoop.hbase.client.coprocessor.Batch; import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils.BlockingRpcCallback; import org.apache.hadoop.hbase.ipc.ServerRpcController; import org.apache.hadoop.hbase.protobuf.generated.RefreshHFilesProtos; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.io.Closeable; import java.io.IOException; @@ -40,7 +40,7 @@ import java.io.IOException; * Region Server side via the RefreshHFilesService. 
*/ public class RefreshHFilesClient implements Closeable { - private static final Log LOG = LogFactory.getLog(RefreshHFilesClient.class); + private static final Logger LOG = LoggerFactory.getLogger(RefreshHFilesClient.class); private final Connection connection; /** diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteEndpoint.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteEndpoint.java index 233ea18ead1..4735b3db72b 100644 --- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteEndpoint.java +++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteEndpoint.java @@ -25,8 +25,6 @@ import java.util.List; import java.util.Set; import java.util.TreeSet; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.CoprocessorEnvironment; @@ -50,6 +48,8 @@ import org.apache.hadoop.hbase.regionserver.OperationStatus; import org.apache.hadoop.hbase.regionserver.Region; import org.apache.hadoop.hbase.regionserver.RegionScanner; import org.apache.hadoop.hbase.util.Bytes; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import com.google.protobuf.RpcCallback; import com.google.protobuf.RpcController; @@ -97,7 +97,7 @@ import com.google.protobuf.Service; */ public class BulkDeleteEndpoint extends BulkDeleteService implements RegionCoprocessor { private static final String NO_OF_VERSIONS_TO_DELETE = "noOfVersionsToDelete"; - private static final Log LOG = LogFactory.getLog(BulkDeleteEndpoint.class); + private static final Logger LOG = LoggerFactory.getLogger(BulkDeleteEndpoint.class); private RegionCoprocessorEnvironment env; @@ -167,7 +167,7 @@ public class BulkDeleteEndpoint extends BulkDeleteService implements RegionCopro } } } catch (IOException ioe) { - LOG.error(ioe); + LOG.error(ioe.toString(), ioe); // Call ServerRpcController#getFailedOn() to retrieve this IOException at client side. 
CoprocessorRpcUtils.setControllerException(controller, ioe); } finally { @@ -175,7 +175,7 @@ public class BulkDeleteEndpoint extends BulkDeleteService implements RegionCopro try { scanner.close(); } catch (IOException ioe) { - LOG.error(ioe); + LOG.error(ioe.toString(), ioe); } } } diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ExampleMasterObserverWithMetrics.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ExampleMasterObserverWithMetrics.java index c27672cf9d3..e916cb38f6d 100644 --- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ExampleMasterObserverWithMetrics.java +++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ExampleMasterObserverWithMetrics.java @@ -21,8 +21,6 @@ package org.apache.hadoop.hbase.coprocessor.example; import java.io.IOException; import java.util.Optional; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.CoprocessorEnvironment; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.RegionInfo; @@ -35,6 +33,8 @@ import org.apache.hadoop.hbase.metrics.Counter; import org.apache.hadoop.hbase.metrics.Gauge; import org.apache.hadoop.hbase.metrics.MetricRegistry; import org.apache.hadoop.hbase.metrics.Timer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * An example coprocessor that collects some metrics to demonstrate the usage of exporting custom @@ -53,7 +53,7 @@ public class ExampleMasterObserverWithMetrics implements MasterCoprocessor, Mast return Optional.of(this); } - private static final Log LOG = LogFactory.getLog(ExampleMasterObserverWithMetrics.class); + private static final Logger LOG = LoggerFactory.getLogger(ExampleMasterObserverWithMetrics.class); /** This is the Timer metric object to keep track of the current count across invocations */ private Timer createTableTimer; diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/RefreshHFilesEndpoint.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/RefreshHFilesEndpoint.java index 71d40d43ecd..60cb1542fdf 100644 --- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/RefreshHFilesEndpoint.java +++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/RefreshHFilesEndpoint.java @@ -21,8 +21,7 @@ package org.apache.hadoop.hbase.coprocessor.example; import com.google.protobuf.RpcCallback; import com.google.protobuf.RpcController; import com.google.protobuf.Service; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; + import org.apache.hadoop.hbase.CoprocessorEnvironment; import org.apache.hadoop.hbase.coprocessor.CoprocessorException; import org.apache.hadoop.hbase.coprocessor.RegionCoprocessor; @@ -30,6 +29,8 @@ import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment; import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils; import org.apache.hadoop.hbase.protobuf.generated.RefreshHFilesProtos; import org.apache.hadoop.hbase.regionserver.Store; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.io.IOException; import java.util.Collections; @@ -44,7 +45,7 @@ import java.util.Collections; */ public class RefreshHFilesEndpoint extends RefreshHFilesProtos.RefreshHFilesService implements RegionCoprocessor { - protected static final Log LOG = LogFactory.getLog(RefreshHFilesEndpoint.class); + protected static final 
Logger LOG = LoggerFactory.getLogger(RefreshHFilesEndpoint.class); private RegionCoprocessorEnvironment env; public RefreshHFilesEndpoint() { diff --git a/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestRefreshHFilesEndpoint.java b/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestRefreshHFilesEndpoint.java index 46336d53b7e..77f98999ead 100644 --- a/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestRefreshHFilesEndpoint.java +++ b/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestRefreshHFilesEndpoint.java @@ -25,8 +25,6 @@ import java.io.IOException; import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -55,10 +53,12 @@ import org.apache.hadoop.hbase.wal.WAL; import org.junit.After; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category(MediumTests.class) public class TestRefreshHFilesEndpoint { - private static final Log LOG = LogFactory.getLog(TestRefreshHFilesEndpoint.class); + private static final Logger LOG = LoggerFactory.getLogger(TestRefreshHFilesEndpoint.class); private static final HBaseTestingUtility HTU = new HBaseTestingUtility(); private static final int NUM_MASTER = 1; private static final int NUM_RS = 2; @@ -128,7 +128,7 @@ public class TestRefreshHFilesEndpoint { if (rex.getCause() instanceof IOException) throw new IOException(); } catch (Throwable ex) { - LOG.error(ex); + LOG.error(ex.toString(), ex); fail("Couldn't call the RefreshRegionHFilesEndpoint"); } } diff --git a/hbase-external-blockcache/pom.xml b/hbase-external-blockcache/pom.xml index 8b4aa269755..1163e1426da 100644 --- a/hbase-external-blockcache/pom.xml +++ b/hbase-external-blockcache/pom.xml @@ -155,8 +155,8 @@ true - commons-logging - commons-logging + org.slf4j + slf4j-api org.apache.htrace diff --git a/hbase-external-blockcache/src/main/java/org/apache/hadoop/hbase/io/hfile/MemcachedBlockCache.java b/hbase-external-blockcache/src/main/java/org/apache/hadoop/hbase/io/hfile/MemcachedBlockCache.java index b12ac1dd632..a5236633812 100644 --- a/hbase-external-blockcache/src/main/java/org/apache/hadoop/hbase/io/hfile/MemcachedBlockCache.java +++ b/hbase-external-blockcache/src/main/java/org/apache/hadoop/hbase/io/hfile/MemcachedBlockCache.java @@ -29,8 +29,6 @@ import java.util.List; import java.util.NoSuchElementException; import java.util.concurrent.ExecutionException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.io.hfile.Cacheable.MemoryType; @@ -40,6 +38,8 @@ import org.apache.hadoop.hbase.trace.TraceUtil; import org.apache.hadoop.hbase.util.Addressing; import org.apache.htrace.core.TraceScope; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import net.spy.memcached.CachedData; import net.spy.memcached.ConnectionFactoryBuilder; @@ -55,7 +55,7 @@ import net.spy.memcached.transcoders.Transcoder; */ @InterfaceAudience.Private public class MemcachedBlockCache implements BlockCache { - private static final Log LOG = LogFactory.getLog(MemcachedBlockCache.class.getName()); + private static final 
Logger LOG = LoggerFactory.getLogger(MemcachedBlockCache.class.getName()); // Some memcache versions won't take more than 1024 * 1024. So set the limit below // that just in case this client is used with those versions. diff --git a/hbase-hadoop-compat/pom.xml b/hbase-hadoop-compat/pom.xml index 8b4e174a77b..684086c2df1 100644 --- a/hbase-hadoop-compat/pom.xml +++ b/hbase-hadoop-compat/pom.xml @@ -88,8 +88,8 @@ - commons-logging - commons-logging + org.slf4j + slf4j-api org.apache.hbase diff --git a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/CompatibilityFactory.java b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/CompatibilityFactory.java index 83053164e59..5c1f1035f5b 100644 --- a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/CompatibilityFactory.java +++ b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/CompatibilityFactory.java @@ -18,18 +18,18 @@ package org.apache.hadoop.hbase; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; - import java.util.Iterator; import java.util.ServiceLoader; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + /** * Class that will create many instances of classes provided by the hbase-hadoop{1|2}-compat jars. */ public class CompatibilityFactory { - private static final Log LOG = LogFactory.getLog(CompatibilitySingletonFactory.class); + private static final Logger LOG = LoggerFactory.getLogger(CompatibilitySingletonFactory.class); public static final String EXCEPTION_START = "Could not create "; public static final String EXCEPTION_END = " Is the hadoop compatibility jar on the classpath?"; @@ -54,7 +54,7 @@ public class CompatibilityFactory { msg.append(it.next()).append(" "); } msg.append("}"); - LOG.warn(msg); + LOG.warn(msg.toString()); } } catch (Exception e) { throw new RuntimeException(createExceptionString(klass), e); diff --git a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/CompatibilitySingletonFactory.java b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/CompatibilitySingletonFactory.java index be6d6d1809c..3dc3f496558 100644 --- a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/CompatibilitySingletonFactory.java +++ b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/CompatibilitySingletonFactory.java @@ -18,14 +18,14 @@ package org.apache.hadoop.hbase; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; - import java.util.HashMap; import java.util.Iterator; import java.util.Map; import java.util.ServiceLoader; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + /** * Factory for classes supplied by hadoop compatibility modules. Only one of each class will be * created. @@ -36,7 +36,7 @@ public class CompatibilitySingletonFactory extends CompatibilityFactory { private final Object lock = new Object(); private final Map instances = new HashMap<>(); } - private static final Log LOG = LogFactory.getLog(CompatibilitySingletonFactory.class); + private static final Logger LOG = LoggerFactory.getLogger(CompatibilitySingletonFactory.class); /** * This is a static only class don't let anyone create an instance. 
@@ -67,7 +67,7 @@ public class CompatibilitySingletonFactory extends CompatibilityFactory { msg.append(it.next()).append(" "); } msg.append("}"); - LOG.warn(msg); + LOG.warn(msg.toString()); } } catch (Exception e) { throw new RuntimeException(createExceptionString(klass), e); diff --git a/hbase-hadoop2-compat/pom.xml b/hbase-hadoop2-compat/pom.xml index 6a1ab05da7c..f97c535d450 100644 --- a/hbase-hadoop2-compat/pom.xml +++ b/hbase-hadoop2-compat/pom.xml @@ -169,8 +169,8 @@ limitations under the License. commons-lang3 - commons-logging - commons-logging + org.slf4j + slf4j-api org.apache.hbase.thirdparty diff --git a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/mapreduce/JobUtil.java b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/mapreduce/JobUtil.java index 0400f7f2c9f..69892851bb7 100644 --- a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/mapreduce/JobUtil.java +++ b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/mapreduce/JobUtil.java @@ -20,10 +20,10 @@ package org.apache.hadoop.hbase.mapreduce; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceStability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.Path; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.mapreduce.Cluster; @@ -35,7 +35,7 @@ import org.apache.hadoop.mapreduce.JobSubmissionFiles; @InterfaceAudience.Private @InterfaceStability.Evolving public abstract class JobUtil { - private static final Log LOG = LogFactory.getLog(JobUtil.class); + private static final Logger LOG = LoggerFactory.getLogger(JobUtil.class); protected JobUtil() { super(); diff --git a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/metrics/impl/GlobalMetricRegistriesAdapter.java b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/metrics/impl/GlobalMetricRegistriesAdapter.java index 7db26a512b4..0ad5d14bcae 100644 --- a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/metrics/impl/GlobalMetricRegistriesAdapter.java +++ b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/metrics/impl/GlobalMetricRegistriesAdapter.java @@ -26,8 +26,6 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.Optional; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.metrics.MetricRegistries; import org.apache.hadoop.hbase.metrics.MetricRegistry; import org.apache.hadoop.hbase.metrics.MetricRegistryInfo; @@ -38,7 +36,8 @@ import org.apache.hadoop.metrics2.impl.JmxCacheBuster; import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem; import org.apache.hadoop.metrics2.lib.DefaultMetricsSystemHelper; import org.apache.hadoop.metrics2.lib.MetricsExecutorImpl; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** @@ -65,7 +64,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe */ public class GlobalMetricRegistriesAdapter { - private static final Log LOG = LogFactory.getLog(GlobalMetricRegistriesAdapter.class); + private static final Logger LOG = LoggerFactory.getLogger(GlobalMetricRegistriesAdapter.class); private class MetricsSourceAdapter implements MetricsSource { private final MetricRegistry registry; diff --git 
a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/metrics/impl/HBaseMetrics2HadoopMetricsAdapter.java b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/metrics/impl/HBaseMetrics2HadoopMetricsAdapter.java index 3a24b94de57..b6a17cf177d 100644 --- a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/metrics/impl/HBaseMetrics2HadoopMetricsAdapter.java +++ b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/metrics/impl/HBaseMetrics2HadoopMetricsAdapter.java @@ -35,8 +35,6 @@ package org.apache.hadoop.hbase.metrics.impl; import java.util.Map; import org.apache.commons.lang3.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.metrics.Counter; import org.apache.hadoop.hbase.metrics.Gauge; import org.apache.hadoop.hbase.metrics.Histogram; @@ -50,6 +48,8 @@ import org.apache.hadoop.metrics2.MetricsInfo; import org.apache.hadoop.metrics2.MetricsRecordBuilder; import org.apache.hadoop.metrics2.lib.Interns; import org.apache.hadoop.metrics2.lib.MutableHistogram; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This is the adapter from "HBase Metrics Framework", implemented in hbase-metrics-api and @@ -60,8 +60,8 @@ import org.apache.hadoop.metrics2.lib.MutableHistogram; * Some of the code is forked from https://github.com/joshelser/dropwizard-hadoop-metrics2. */ public class HBaseMetrics2HadoopMetricsAdapter { - private static final Log LOG - = LogFactory.getLog(HBaseMetrics2HadoopMetricsAdapter.class); + private static final Logger LOG + = LoggerFactory.getLogger(HBaseMetrics2HadoopMetricsAdapter.class); private static final String EMPTY_STRING = ""; public HBaseMetrics2HadoopMetricsAdapter() { diff --git a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionAggregateSourceImpl.java b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionAggregateSourceImpl.java index b03549dfa02..cb78ccf183b 100644 --- a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionAggregateSourceImpl.java +++ b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionAggregateSourceImpl.java @@ -23,9 +23,9 @@ import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.metrics.BaseSourceImpl; import org.apache.hadoop.hbase.metrics.Interns; import org.apache.hadoop.metrics2.MetricsCollector; @@ -37,7 +37,7 @@ import org.apache.hadoop.metrics2.lib.MetricsExecutorImpl; public class MetricsRegionAggregateSourceImpl extends BaseSourceImpl implements MetricsRegionAggregateSource { - private static final Log LOG = LogFactory.getLog(MetricsRegionAggregateSourceImpl.class); + private static final Logger LOG = LoggerFactory.getLogger(MetricsRegionAggregateSourceImpl.class); private final MetricsExecutorImpl executor = new MetricsExecutorImpl(); diff --git a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSourceImpl.java b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSourceImpl.java index 09175d5dbe7..8f11811a94f 100644 --- a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSourceImpl.java +++ 
b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSourceImpl.java @@ -20,9 +20,9 @@ package org.apache.hadoop.hbase.regionserver; import java.util.concurrent.atomic.AtomicBoolean; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.metrics.Interns; import org.apache.hadoop.metrics2.MetricsRecordBuilder; import org.apache.hadoop.metrics2.lib.DynamicMetricsRegistry; @@ -31,7 +31,7 @@ import org.apache.hadoop.metrics2.lib.MutableFastCounter; @InterfaceAudience.Private public class MetricsRegionSourceImpl implements MetricsRegionSource { - private static final Log LOG = LogFactory.getLog(MetricsRegionSourceImpl.class); + private static final Logger LOG = LoggerFactory.getLogger(MetricsRegionSourceImpl.class); private AtomicBoolean closed = new AtomicBoolean(false); diff --git a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsTableAggregateSourceImpl.java b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsTableAggregateSourceImpl.java index 5ef8d812462..588986e7c4d 100644 --- a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsTableAggregateSourceImpl.java +++ b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsTableAggregateSourceImpl.java @@ -20,9 +20,9 @@ package org.apache.hadoop.hbase.regionserver; import java.util.concurrent.ConcurrentHashMap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.metrics.BaseSourceImpl; import org.apache.hadoop.hbase.metrics.Interns; import org.apache.hadoop.metrics2.MetricsCollector; @@ -32,7 +32,7 @@ import org.apache.hadoop.metrics2.MetricsRecordBuilder; public class MetricsTableAggregateSourceImpl extends BaseSourceImpl implements MetricsTableAggregateSource { - private static final Log LOG = LogFactory.getLog(MetricsTableAggregateSourceImpl.class); + private static final Logger LOG = LoggerFactory.getLogger(MetricsTableAggregateSourceImpl.class); private ConcurrentHashMap tableSources = new ConcurrentHashMap<>(); public MetricsTableAggregateSourceImpl() { diff --git a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsTableSourceImpl.java b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsTableSourceImpl.java index f95eb4c5978..2269d9ab93a 100644 --- a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsTableSourceImpl.java +++ b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsTableSourceImpl.java @@ -20,10 +20,10 @@ package org.apache.hadoop.hbase.regionserver; import java.util.concurrent.atomic.AtomicBoolean; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.TableName; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.metrics.Interns; import org.apache.hadoop.metrics2.MetricsRecordBuilder; import org.apache.hadoop.metrics2.lib.DynamicMetricsRegistry; @@ -31,7 +31,7 @@ import org.apache.hadoop.metrics2.lib.DynamicMetricsRegistry; @InterfaceAudience.Private public class 
MetricsTableSourceImpl implements MetricsTableSource { - private static final Log LOG = LogFactory.getLog(MetricsTableSourceImpl.class); + private static final Logger LOG = LoggerFactory.getLogger(MetricsTableSourceImpl.class); private AtomicBoolean closed = new AtomicBoolean(false); diff --git a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/impl/JmxCacheBuster.java b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/impl/JmxCacheBuster.java index 00763c67d04..dc5608014e1 100644 --- a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/impl/JmxCacheBuster.java +++ b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/impl/JmxCacheBuster.java @@ -22,9 +22,9 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.metrics2.MetricsExecutor; import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem; import org.apache.hadoop.metrics2.lib.MetricsExecutorImpl; @@ -42,7 +42,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe */ @InterfaceAudience.Private public class JmxCacheBuster { - private static final Log LOG = LogFactory.getLog(JmxCacheBuster.class); + private static final Logger LOG = LoggerFactory.getLogger(JmxCacheBuster.class); private static AtomicReference fut = new AtomicReference<>(null); private static MetricsExecutor executor = new MetricsExecutorImpl(); private static AtomicBoolean stopped = new AtomicBoolean(false); diff --git a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/lib/DefaultMetricsSystemHelper.java b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/lib/DefaultMetricsSystemHelper.java index eb465c38c8b..3ecd8887efe 100644 --- a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/lib/DefaultMetricsSystemHelper.java +++ b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/lib/DefaultMetricsSystemHelper.java @@ -20,13 +20,12 @@ package org.apache.hadoop.metrics2.lib; import java.lang.reflect.Field; import java.lang.reflect.Method; import java.util.HashMap; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class DefaultMetricsSystemHelper { - private static final Log LOG = LogFactory.getLog(DefaultMetricsSystemHelper.class); + private static final Logger LOG = LoggerFactory.getLogger(DefaultMetricsSystemHelper.class); private final Method removeObjectMethod; private final Field sourceNamesField; private final Field mapField; @@ -49,7 +48,7 @@ public class DefaultMetricsSystemHelper { f2 = UniqueNames.class.getDeclaredField("map"); f2.setAccessible(true); } catch (NoSuchFieldException e) { - LOG.trace(e); + LOG.trace(e.toString(), e); f1 = null; f2 = null; } diff --git a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/lib/DynamicMetricsRegistry.java b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/lib/DynamicMetricsRegistry.java index a7d221b3b58..0b8111bb65c 100644 --- a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/lib/DynamicMetricsRegistry.java +++ b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/lib/DynamicMetricsRegistry.java @@ -21,9 +21,9 @@ package 
org.apache.hadoop.metrics2.lib; import java.util.Collection; import java.util.concurrent.ConcurrentMap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.metrics.Interns; import org.apache.hadoop.metrics2.MetricsException; import org.apache.hadoop.metrics2.MetricsInfo; @@ -47,7 +47,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps; */ @InterfaceAudience.Private public class DynamicMetricsRegistry { - private static final Log LOG = LogFactory.getLog(DynamicMetricsRegistry.class); + private static final Logger LOG = LoggerFactory.getLogger(DynamicMetricsRegistry.class); private final ConcurrentMap metricsMap = Maps.newConcurrentMap(); diff --git a/hbase-http/pom.xml b/hbase-http/pom.xml index 69baef87d3e..709395692a1 100644 --- a/hbase-http/pom.xml +++ b/hbase-http/pom.xml @@ -208,7 +208,6 @@ org.apache.hbase hbase-resource-bundle - ${project.version} true @@ -249,8 +248,8 @@ - commons-logging - commons-logging + org.slf4j + slf4j-api log4j diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpRequestLog.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpRequestLog.java index cfc0640dadb..243da1b14f6 100644 --- a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpRequestLog.java +++ b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpRequestLog.java @@ -19,22 +19,22 @@ package org.apache.hadoop.hbase.http; import java.util.HashMap; -import org.apache.commons.logging.impl.Log4JLogger; -import org.apache.commons.logging.Log; import org.apache.commons.logging.LogConfigurationException; -import org.apache.commons.logging.LogFactory; +import org.apache.commons.logging.impl.Log4JLogger; import org.apache.log4j.Appender; -import org.apache.log4j.Logger; - -import org.eclipse.jetty.server.RequestLog; +import org.apache.log4j.LogManager; import org.eclipse.jetty.server.NCSARequestLog; +import org.eclipse.jetty.server.RequestLog; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.slf4j.impl.Log4jLoggerAdapter; /** * RequestLog object for use with Http */ public class HttpRequestLog { - private static final Log LOG = LogFactory.getLog(HttpRequestLog.class); + private static final Logger LOG = LoggerFactory.getLogger(HttpRequestLog.class); private static final HashMap serverToComponent; static { @@ -43,6 +43,19 @@ public class HttpRequestLog { serverToComponent.put("region", "regionserver"); } + private static org.apache.log4j.Logger getLog4jLogger(String loggerName) { + Logger logger = LoggerFactory.getLogger(loggerName); + + if (logger instanceof Log4JLogger) { + Log4JLogger httpLog4JLog = (Log4JLogger)logger; + return httpLog4JLog.getLogger(); + } else if (logger instanceof Log4jLoggerAdapter) { + return LogManager.getLogger(loggerName); + } else { + return null; + } + } + public static RequestLog getRequestLog(String name) { String lookup = serverToComponent.get(name); @@ -51,43 +64,41 @@ public class HttpRequestLog { } String loggerName = "http.requests." 
+ name; String appenderName = name + "requestlog"; - Log logger = LogFactory.getLog(loggerName); - if (logger instanceof Log4JLogger) { - Log4JLogger httpLog4JLog = (Log4JLogger)logger; - Logger httpLogger = httpLog4JLog.getLogger(); - Appender appender = null; + org.apache.log4j.Logger httpLogger = getLog4jLogger(loggerName); - try { - appender = httpLogger.getAppender(appenderName); - } catch (LogConfigurationException e) { - LOG.warn("Http request log for " + loggerName - + " could not be created"); - throw e; - } - - if (appender == null) { - LOG.info("Http request log for " + loggerName - + " is not defined"); - return null; - } - - if (appender instanceof HttpRequestLogAppender) { - HttpRequestLogAppender requestLogAppender - = (HttpRequestLogAppender)appender; - NCSARequestLog requestLog = new NCSARequestLog(); - requestLog.setFilename(requestLogAppender.getFilename()); - requestLog.setRetainDays(requestLogAppender.getRetainDays()); - return requestLog; - } else { - LOG.warn("Jetty request log for " + loggerName - + " was of the wrong class"); - return null; - } - } - else { + if (httpLogger == null) { LOG.warn("Jetty request log can only be enabled using Log4j"); return null; } + + Appender appender = null; + + try { + appender = httpLogger.getAppender(appenderName); + } catch (LogConfigurationException e) { + LOG.warn("Http request log for " + loggerName + + " could not be created"); + throw e; + } + + if (appender == null) { + LOG.info("Http request log for " + loggerName + + " is not defined"); + return null; + } + + if (appender instanceof HttpRequestLogAppender) { + HttpRequestLogAppender requestLogAppender + = (HttpRequestLogAppender)appender; + NCSARequestLog requestLog = new NCSARequestLog(); + requestLog.setFilename(requestLogAppender.getFilename()); + requestLog.setRetainDays(requestLogAppender.getRetainDays()); + return requestLog; + } else { + LOG.warn("Jetty request log for " + loggerName + + " was of the wrong class"); + return null; + } } } diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java index c2b5944ba2d..087a33f82e6 100644 --- a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java +++ b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java @@ -47,8 +47,6 @@ import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletRequestWrapper; import javax.servlet.http.HttpServletResponse; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.HadoopIllegalArgumentException; import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; @@ -93,6 +91,8 @@ import org.eclipse.jetty.webapp.WebAppContext; import org.glassfish.jersey.server.ResourceConfig; import org.glassfish.jersey.servlet.ServletContainer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Create a Jetty embedded server to answer http requests. 
The primary goal @@ -105,7 +105,7 @@ import org.glassfish.jersey.servlet.ServletContainer; @InterfaceAudience.Private @InterfaceStability.Evolving public class HttpServer implements FilterContainer { - private static final Log LOG = LogFactory.getLog(HttpServer.class); + private static final Logger LOG = LoggerFactory.getLogger(HttpServer.class); private static final String EMPTY_STRING = ""; private static final int DEFAULT_MAX_HEADER_SIZE = 64 * 1024; // 64K diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/jmx/JMXJsonServlet.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/jmx/JMXJsonServlet.java index 2e43be26776..13d6c400acc 100644 --- a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/jmx/JMXJsonServlet.java +++ b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/jmx/JMXJsonServlet.java @@ -31,10 +31,10 @@ import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.http.HttpServer; import org.apache.hadoop.hbase.util.JSONBean; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /* * This servlet is based off of the JMXProxyServlet from Tomcat 7.0.14. It has @@ -111,7 +111,7 @@ import org.apache.hadoop.hbase.util.JSONBean; * */ public class JMXJsonServlet extends HttpServlet { - private static final Log LOG = LogFactory.getLog(JMXJsonServlet.class); + private static final Logger LOG = LoggerFactory.getLogger(JMXJsonServlet.class); private static final long serialVersionUID = 1L; diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/lib/StaticUserWebFilter.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/lib/StaticUserWebFilter.java index a1fa9f00a02..72cedddd686 100644 --- a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/lib/StaticUserWebFilter.java +++ b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/lib/StaticUserWebFilter.java @@ -17,10 +17,14 @@ */ package org.apache.hadoop.hbase.http.lib; +import static org.apache.hadoop.hbase.http.ServerConfigurationKeys.DEFAULT_HBASE_HTTP_STATIC_USER; +import static org.apache.hadoop.hbase.http.ServerConfigurationKeys.HBASE_HTTP_STATIC_USER; + import java.io.IOException; import java.security.Principal; import java.util.HashMap; +import javax.servlet.Filter; import javax.servlet.FilterChain; import javax.servlet.FilterConfig; import javax.servlet.ServletException; @@ -29,18 +33,13 @@ import javax.servlet.ServletResponse; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletRequestWrapper; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.http.FilterContainer; import org.apache.hadoop.hbase.http.FilterInitializer; - -import javax.servlet.Filter; - -import static org.apache.hadoop.hbase.http.ServerConfigurationKeys.HBASE_HTTP_STATIC_USER; -import static org.apache.hadoop.hbase.http.ServerConfigurationKeys.DEFAULT_HBASE_HTTP_STATIC_USER; +import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Provides a servlet filter that pretends to authenticate a fake user (Dr.Who) @@ -50,7 +49,7 @@ import static org.apache.hadoop.hbase.http.ServerConfigurationKeys.DEFAULT_HBASE public 
class StaticUserWebFilter extends FilterInitializer { static final String DEPRECATED_UGI_KEY = "dfs.web.ugi"; - private static final Log LOG = LogFactory.getLog(StaticUserWebFilter.class); + private static final Logger LOG = LoggerFactory.getLogger(StaticUserWebFilter.class); static class User implements Principal { private final String name; diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/log/LogLevel.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/log/LogLevel.java index e23eecdbad7..0d409726e97 100644 --- a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/log/LogLevel.java +++ b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/log/LogLevel.java @@ -30,14 +30,16 @@ import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.impl.Jdk14Logger; import org.apache.commons.logging.impl.Log4JLogger; import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceStability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.slf4j.impl.Log4jLoggerAdapter; import org.apache.hadoop.hbase.http.HttpServer; import org.apache.hadoop.util.ServletUtil; +import org.apache.log4j.LogManager; /** * Change log level in runtime. @@ -115,7 +117,7 @@ public class LogLevel { out.println(MARKER + "Submitted Log Name: " + logName + "
"); - Log log = LogFactory.getLog(logName); + Logger log = LoggerFactory.getLogger(logName); out.println(MARKER + "Log Class: " + log.getClass().getName() +"
"); if (level != null) { @@ -124,11 +126,11 @@ public class LogLevel { if (log instanceof Log4JLogger) { process(((Log4JLogger)log).getLogger(), level, out); - } - else if (log instanceof Jdk14Logger) { + } else if (log instanceof Jdk14Logger) { process(((Jdk14Logger)log).getLogger(), level, out); - } - else { + } else if (log instanceof Log4jLoggerAdapter) { + process(LogManager.getLogger(logName), level, out); + } else { out.println("Sorry, " + log.getClass() + " not supported.
"); } } diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestGlobalFilter.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestGlobalFilter.java index 729dd06b4cc..f2461a16aa8 100644 --- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestGlobalFilter.java +++ b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestGlobalFilter.java @@ -33,18 +33,18 @@ import javax.servlet.ServletRequest; import javax.servlet.ServletResponse; import javax.servlet.http.HttpServletRequest; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.net.NetUtils; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({MiscTests.class, SmallTests.class}) public class TestGlobalFilter extends HttpServerFunctionalTest { - private static final Log LOG = LogFactory.getLog(HttpServer.class); + private static final Logger LOG = LoggerFactory.getLogger(HttpServer.class); static final Set RECORDS = new TreeSet<>(); /** A very simple filter that records accessed uri's */ diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java index fddb2a4842c..68c075229ba 100644 --- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java +++ b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java @@ -45,8 +45,6 @@ import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletRequestWrapper; import javax.servlet.http.HttpServletResponse; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeys; import org.apache.hadoop.hbase.http.HttpServer.QuotingInputFilter.RequestQuoter; @@ -67,10 +65,12 @@ import org.junit.Ignore; import org.junit.Test; import org.junit.experimental.categories.Category; import org.mockito.Mockito; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({MiscTests.class, SmallTests.class}) public class TestHttpServer extends HttpServerFunctionalTest { - private static final Log LOG = LogFactory.getLog(TestHttpServer.class); + private static final Logger LOG = LoggerFactory.getLogger(TestHttpServer.class); private static HttpServer server; private static URL baseUrl; // jetty 9.4.x needs this many threads to start, even in the small. 
diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServerWebapps.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServerWebapps.java index db394a8d7eb..3edb12c4f76 100644 --- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServerWebapps.java +++ b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServerWebapps.java @@ -21,9 +21,8 @@ import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.junit.Test; import org.junit.experimental.categories.Category; -import org.apache.commons.logging.LogFactory; -import org.apache.commons.logging.Log; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.io.FileNotFoundException; /** @@ -31,7 +30,7 @@ import java.io.FileNotFoundException; */ @Category({MiscTests.class, SmallTests.class}) public class TestHttpServerWebapps extends HttpServerFunctionalTest { - private static final Log log = LogFactory.getLog(TestHttpServerWebapps.class); + private static final Logger log = LoggerFactory.getLogger(TestHttpServerWebapps.class); /** * Test that the test server is loadable on the classpath diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestPathFilter.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestPathFilter.java index 5eff2b4fb3f..cffb4c3c09f 100644 --- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestPathFilter.java +++ b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestPathFilter.java @@ -33,18 +33,18 @@ import javax.servlet.ServletRequest; import javax.servlet.ServletResponse; import javax.servlet.http.HttpServletRequest; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.net.NetUtils; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({MiscTests.class, SmallTests.class}) public class TestPathFilter extends HttpServerFunctionalTest { - private static final Log LOG = LogFactory.getLog(HttpServer.class); + private static final Logger LOG = LoggerFactory.getLogger(HttpServer.class); static final Set RECORDS = new TreeSet<>(); /** A very simple filter that records accessed uri's */ diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestSSLHttpServer.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestSSLHttpServer.java index b599350cc66..282530f0ec9 100644 --- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestSSLHttpServer.java +++ b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestSSLHttpServer.java @@ -25,8 +25,6 @@ import java.net.URL; import javax.net.ssl.HttpsURLConnection; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.hbase.HBaseConfiguration; @@ -40,6 +38,8 @@ import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This testcase issues SSL certificates configures the HttpServer to serve @@ -51,7 +51,7 @@ public class TestSSLHttpServer extends HttpServerFunctionalTest { private static 
final String BASEDIR = System.getProperty("test.build.dir", "target/test-dir") + "/" + TestSSLHttpServer.class.getSimpleName(); - private static final Log LOG = LogFactory.getLog(TestSSLHttpServer.class); + private static final Logger LOG = LoggerFactory.getLogger(TestSSLHttpServer.class); private static Configuration conf; private static HttpServer server; private static URL baseUrl; diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestServletFilter.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestServletFilter.java index 32bc03ed2b0..756487199c5 100644 --- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestServletFilter.java +++ b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestServletFilter.java @@ -32,8 +32,6 @@ import javax.servlet.ServletRequest; import javax.servlet.ServletResponse; import javax.servlet.http.HttpServletRequest; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; @@ -43,10 +41,12 @@ import org.junit.Assert; import org.junit.Ignore; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({MiscTests.class, SmallTests.class}) public class TestServletFilter extends HttpServerFunctionalTest { - private static final Log LOG = LogFactory.getLog(HttpServer.class); + private static final Logger LOG = LoggerFactory.getLogger(HttpServer.class); static volatile String uri = null; /** A very simple filter which record the uri filtered. */ diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestSpnegoHttpServer.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestSpnegoHttpServer.java index 4fad03139ae..13e2519da5c 100644 --- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestSpnegoHttpServer.java +++ b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestSpnegoHttpServer.java @@ -16,10 +16,8 @@ */ package org.apache.hadoop.hbase.http; -import java.io.BufferedReader; import java.io.File; import java.io.IOException; -import java.io.InputStreamReader; import java.net.HttpURLConnection; import java.net.URL; import java.security.Principal; @@ -29,8 +27,6 @@ import java.util.Set; import javax.security.auth.Subject; import javax.security.auth.kerberos.KerberosTicket; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.http.TestHttpServer.EchoServlet; import org.apache.hadoop.hbase.http.resource.JerseyResource; @@ -48,8 +44,6 @@ import org.apache.http.client.methods.HttpGet; import org.apache.http.client.protocol.HttpClientContext; import org.apache.http.config.Lookup; import org.apache.http.config.RegistryBuilder; -import org.apache.http.entity.ByteArrayEntity; -import org.apache.http.entity.ContentType; import org.apache.http.impl.auth.SPNegoSchemeFactory; import org.apache.http.impl.client.BasicCredentialsProvider; import org.apache.http.impl.client.HttpClients; @@ -65,6 +59,8 @@ import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Test class for SPNEGO authentication on the HttpServer. 
Uses Kerby's MiniKDC and Apache @@ -72,7 +68,7 @@ import org.junit.experimental.categories.Category; */ @Category({MiscTests.class, SmallTests.class}) public class TestSpnegoHttpServer extends HttpServerFunctionalTest { - private static final Log LOG = LogFactory.getLog(TestSpnegoHttpServer.class); + private static final Logger LOG = LoggerFactory.getLogger(TestSpnegoHttpServer.class); private static final String KDC_SERVER_HOST = "localhost"; private static final String CLIENT_PRINCIPAL = "client"; diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/jmx/TestJMXJsonServlet.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/jmx/TestJMXJsonServlet.java index 484162af55c..9a6399d5935 100644 --- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/jmx/TestJMXJsonServlet.java +++ b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/jmx/TestJMXJsonServlet.java @@ -25,8 +25,6 @@ import java.util.regex.Pattern; import javax.servlet.http.HttpServletResponse; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.http.HttpServer; @@ -35,10 +33,12 @@ import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({MiscTests.class, SmallTests.class}) public class TestJMXJsonServlet extends HttpServerFunctionalTest { - private static final Log LOG = LogFactory.getLog(TestJMXJsonServlet.class); + private static final Logger LOG = LoggerFactory.getLogger(TestJMXJsonServlet.class); private static HttpServer server; private static URL baseUrl; diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/log/TestLogLevel.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/log/TestLogLevel.java index e14e3b4846f..84d2493ba36 100644 --- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/log/TestLogLevel.java +++ b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/log/TestLogLevel.java @@ -19,18 +19,23 @@ package org.apache.hadoop.hbase.http.log; import static org.junit.Assert.assertTrue; -import java.io.*; -import java.net.*; +import java.io.BufferedReader; +import java.io.InputStreamReader; +import java.io.PrintStream; +import java.net.URI; +import java.net.URL; +import org.apache.hadoop.hbase.http.HttpServer; import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.apache.hadoop.hbase.http.HttpServer; import org.apache.hadoop.net.NetUtils; -import org.apache.commons.logging.*; -import org.apache.commons.logging.impl.*; -import org.apache.log4j.*; +import org.apache.log4j.Level; +import org.apache.log4j.LogManager; +import org.apache.log4j.Logger; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.LoggerFactory; +import org.slf4j.impl.Log4jLoggerAdapter; @Category({MiscTests.class, SmallTests.class}) public class TestLogLevel { @@ -40,11 +45,11 @@ public class TestLogLevel { @SuppressWarnings("deprecation") public void testDynamicLogLevel() throws Exception { String logName = TestLogLevel.class.getName(); - Log testlog = LogFactory.getLog(logName); + org.slf4j.Logger testlog = LoggerFactory.getLogger(logName); //only test Log4JLogger - if (testlog instanceof Log4JLogger) { - Logger log = 
((Log4JLogger)testlog).getLogger(); + if (testlog instanceof Log4jLoggerAdapter) { + Logger log = LogManager.getLogger(logName); log.debug("log.debug1"); log.info("log.info1"); log.error("log.error1"); diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/resource/JerseyResource.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/resource/JerseyResource.java index bf0e609192e..31ff0ed4e0d 100644 --- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/resource/JerseyResource.java +++ b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/resource/JerseyResource.java @@ -30,9 +30,9 @@ import javax.ws.rs.QueryParam; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.eclipse.jetty.util.ajax.JSON; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A simple Jersey resource class TestHttpServer. @@ -41,7 +41,7 @@ import org.eclipse.jetty.util.ajax.JSON; */ @Path("") public class JerseyResource { - private static final Log LOG = LogFactory.getLog(JerseyResource.class); + private static final Logger LOG = LoggerFactory.getLogger(JerseyResource.class); public static final String PATH = "path"; public static final String OP = "op"; diff --git a/hbase-it/pom.xml b/hbase-it/pom.xml index 1643a640102..8cfbd3bf071 100644 --- a/hbase-it/pom.xml +++ b/hbase-it/pom.xml @@ -231,8 +231,8 @@ metrics-core
- commons-logging - commons-logging + org.slf4j + slf4j-api commons-cli diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/DistributedHBaseCluster.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/DistributedHBaseCluster.java index e8a00416e57..6546ac9288e 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/DistributedHBaseCluster.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/DistributedHBaseCluster.java @@ -22,6 +22,7 @@ import java.util.ArrayList; import java.util.Comparator; import java.util.HashSet; import java.util.List; +import java.util.Objects; import java.util.Set; import java.util.TreeSet; @@ -422,7 +423,7 @@ public class DistributedHBaseCluster extends HBaseCluster { LOG.warn("Restoring cluster - restoring region servers reported " + deferred.size() + " errors:"); for (int i=0; i columnFamilies; diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/monkies/PolicyBasedChaosMonkey.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/monkies/PolicyBasedChaosMonkey.java index 70452bb5568..a49f54117e0 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/monkies/PolicyBasedChaosMonkey.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/monkies/PolicyBasedChaosMonkey.java @@ -23,18 +23,18 @@ import java.util.Collection; import java.util.List; import org.apache.commons.lang3.RandomUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.IntegrationTestingUtility; import org.apache.hadoop.hbase.chaos.policies.Policy; import org.apache.hadoop.hbase.util.Pair; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Chaos monkey that given multiple policies will run actions against the cluster. */ public class PolicyBasedChaosMonkey extends ChaosMonkey { - private static final Log LOG = LogFactory.getLog(PolicyBasedChaosMonkey.class); + private static final Logger LOG = LoggerFactory.getLogger(PolicyBasedChaosMonkey.class); private static final long ONE_SEC = 1000; private static final long FIVE_SEC = 5 * ONE_SEC; private static final long ONE_MIN = 60 * ONE_SEC; diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/policies/Policy.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/policies/Policy.java index 6b365f81a51..81267a65688 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/policies/Policy.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/policies/Policy.java @@ -18,18 +18,18 @@ package org.apache.hadoop.hbase.chaos.policies; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.IntegrationTestingUtility; import org.apache.hadoop.hbase.chaos.actions.Action; import org.apache.hadoop.hbase.util.StoppableImplementation; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A policy to introduce chaos to the cluster */ public abstract class Policy extends StoppableImplementation implements Runnable { - protected static final Log LOG = LogFactory.getLog(Policy.class); + protected static final Logger LOG = LoggerFactory.getLogger(Policy.class); protected PolicyContext context; diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/util/ChaosMonkeyRunner.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/util/ChaosMonkeyRunner.java index d72111f0ef2..8385c156285 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/util/ChaosMonkeyRunner.java +++ 
b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/util/ChaosMonkeyRunner.java @@ -23,8 +23,6 @@ import java.util.Set; import org.apache.commons.cli.CommandLine; import org.apache.commons.lang3.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HConstants; @@ -34,11 +32,12 @@ import org.apache.hadoop.hbase.chaos.factories.MonkeyFactory; import org.apache.hadoop.hbase.chaos.monkies.ChaosMonkey; import org.apache.hadoop.hbase.util.AbstractHBaseTool; import org.apache.hadoop.util.ToolRunner; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; public class ChaosMonkeyRunner extends AbstractHBaseTool { - private static final Log LOG = LogFactory.getLog(ChaosMonkeyRunner.class); + private static final Logger LOG = LoggerFactory.getLogger(ChaosMonkeyRunner.class); public static final String MONKEY_LONG_OPT = "monkey"; public static final String CHAOS_MONKEY_PROPS = "monkeyProps"; @@ -75,7 +74,7 @@ public class ChaosMonkeyRunner extends AbstractHBaseTool { monkeyProps.load(this.getClass().getClassLoader() .getResourceAsStream(chaosMonkeyPropsFile)); } catch (IOException e) { - LOG.warn(e); + LOG.warn(e.toString(), e); System.exit(EXIT_FAILURE); } } diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/util/Monkeys.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/util/Monkeys.java index 1ce43565692..cb29427e9d3 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/util/Monkeys.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/util/Monkeys.java @@ -24,11 +24,12 @@ import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.IntegrationTestingUtility; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder; @@ -36,7 +37,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFa * This class can be used to control chaos monkeys life cycle. 
*/ public class Monkeys implements Closeable { - private static final Log LOG = LogFactory.getLog(Monkeys.class); + private static final Logger LOG = LoggerFactory.getLogger(Monkeys.class); private final Configuration conf; private final ChaosMonkeyRunner monkeyRunner; diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/ipc/IntegrationTestRpcClient.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/ipc/IntegrationTestRpcClient.java index 27a2d8506fa..9754d4eac68 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/ipc/IntegrationTestRpcClient.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/ipc/IntegrationTestRpcClient.java @@ -37,8 +37,6 @@ import java.util.concurrent.atomic.AtomicReference; import java.util.concurrent.locks.ReadWriteLock; import java.util.concurrent.locks.ReentrantReadWriteLock; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.codec.Codec; @@ -51,13 +49,14 @@ import org.apache.hadoop.hbase.util.Threads; import org.junit.Ignore; import org.junit.Test; import org.junit.experimental.categories.Category; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; @Category(IntegrationTests.class) public class IntegrationTestRpcClient { - private static final Log LOG = LogFactory.getLog(IntegrationTestRpcClient.class); + private static final Logger LOG = LoggerFactory.getLogger(IntegrationTestRpcClient.class); private final Configuration conf; @@ -203,7 +202,7 @@ public class IntegrationTestRpcClient { try { cluster.startServer(); } catch (Exception e) { - LOG.warn(e); + LOG.warn(e.toString(), e); exception.compareAndSet(null, e); } } else { @@ -211,7 +210,7 @@ public class IntegrationTestRpcClient { try { cluster.stopRandomServer(); } catch (Exception e) { - LOG.warn(e); + LOG.warn(e.toString(), e); exception.compareAndSet(null, e); } } @@ -261,7 +260,7 @@ public class IntegrationTestRpcClient { BlockingInterface stub = newBlockingStub(rpcClient, server.getListenerAddress()); ret = stub.echo(null, param); } catch (Exception e) { - LOG.warn(e); + LOG.warn(e.toString(), e); continue; // expected in case connection is closing or closed } diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestBulkLoad.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestBulkLoad.java index 3fa1054d954..2588e635a16 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestBulkLoad.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestBulkLoad.java @@ -34,8 +34,6 @@ import java.util.concurrent.atomic.AtomicLong; import org.apache.commons.cli.CommandLine; import org.apache.commons.lang3.RandomStringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; @@ -88,7 +86,8 @@ import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.util.ToolRunner; import org.junit.Test; import org.junit.experimental.categories.Category; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.base.Joiner; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; @@ -128,7 +127,7 @@ import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; @Category(IntegrationTests.class) public class IntegrationTestBulkLoad extends IntegrationTestBase { - private static final Log LOG = LogFactory.getLog(IntegrationTestBulkLoad.class); + private static final Logger LOG = LoggerFactory.getLogger(IntegrationTestBulkLoad.class); private static final byte[] CHAIN_FAM = Bytes.toBytes("L"); private static final byte[] SORT_FAM = Bytes.toBytes("S"); @@ -197,7 +196,7 @@ public class IntegrationTestBulkLoad extends IntegrationTestBase { Thread.sleep(sleepTime.get()); } } catch (InterruptedException e1) { - LOG.error(e1); + LOG.error(e1.toString(), e1); } } } diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestImportTsv.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestImportTsv.java index dfc54e0b338..ab5f2bb8274 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestImportTsv.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestImportTsv.java @@ -30,8 +30,6 @@ import java.util.Map; import java.util.Set; import java.util.TreeSet; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.fs.FileSystem; @@ -58,6 +56,8 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Validate ImportTsv + LoadIncrementalHFiles on a distributed cluster. @@ -66,7 +66,7 @@ import org.junit.rules.TestName; public class IntegrationTestImportTsv extends Configured implements Tool { private static final String NAME = IntegrationTestImportTsv.class.getSimpleName(); - private static final Log LOG = LogFactory.getLog(IntegrationTestImportTsv.class); + private static final Logger LOG = LoggerFactory.getLogger(IntegrationTestImportTsv.class); protected static final String simple_tsv = "row1\t1\tc1\tc2\n" + diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestTableSnapshotInputFormat.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestTableSnapshotInputFormat.java index 2df1c4bff18..065cec916f5 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestTableSnapshotInputFormat.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestTableSnapshotInputFormat.java @@ -20,8 +20,6 @@ package org.apache.hadoop.hbase.mapreduce; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HBaseConfiguration; @@ -34,6 +32,8 @@ import org.apache.hadoop.util.ToolRunner; import org.junit.After; import org.junit.Before; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * An integration test to test {@link TableSnapshotInputFormat} which enables @@ -68,8 +68,8 @@ import org.junit.experimental.categories.Category; @Category(IntegrationTests.class) // Not runnable as a unit test. 
See TestTableSnapshotInputFormat public class IntegrationTestTableSnapshotInputFormat extends IntegrationTestBase { - - private static final Log LOG = LogFactory.getLog(IntegrationTestTableSnapshotInputFormat.class); + private static final Logger LOG = + LoggerFactory.getLogger(IntegrationTestTableSnapshotInputFormat.class); private static final String TABLE_NAME_KEY = "IntegrationTestTableSnapshotInputFormat.table"; private static final String DEFAULT_TABLE_NAME = "IntegrationTestTableSnapshotInputFormat"; diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java index e45baf151c1..bb31ece9730 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java @@ -30,8 +30,6 @@ import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; import org.apache.commons.lang3.RandomStringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.commons.math3.stat.descriptive.DescriptiveStatistics; import org.apache.hadoop.hbase.ClusterStatus; import org.apache.hadoop.hbase.IntegrationTestingUtility; @@ -74,6 +72,8 @@ import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Integration test that should benchmark how fast HBase can recover from failures. This test starts @@ -121,7 +121,7 @@ public class IntegrationTestMTTR { * Constants. */ private static final byte[] FAMILY = Bytes.toBytes("d"); - private static final Log LOG = LogFactory.getLog(IntegrationTestMTTR.class); + private static final Logger LOG = LoggerFactory.getLogger(IntegrationTestMTTR.class); private static long sleepTime; private static final String SLEEP_TIME_KEY = "hbase.IntegrationTestMTTR.sleeptime"; private static final long SLEEP_TIME_DEFAULT = 60 * 1000l; diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/rsgroup/IntegrationTestRSGroup.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/rsgroup/IntegrationTestRSGroup.java index b10e54a74f6..f5f2ff99f23 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/rsgroup/IntegrationTestRSGroup.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/rsgroup/IntegrationTestRSGroup.java @@ -19,8 +19,6 @@ */ package org.apache.hadoop.hbase.rsgroup; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.IntegrationTestingUtility; import org.apache.hadoop.hbase.Waiter; @@ -29,6 +27,8 @@ import org.apache.hadoop.hbase.testclassification.IntegrationTests; import org.junit.After; import org.junit.Before; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Runs all of the units tests defined in TestGroupBase as an integration test. 
@@ -36,7 +36,7 @@ import org.junit.experimental.categories.Category; */ @Category(IntegrationTests.class) public class IntegrationTestRSGroup extends TestRSGroupsBase { - private final static Log LOG = LogFactory.getLog(IntegrationTestRSGroup.class); + private final static Logger LOG = LoggerFactory.getLogger(IntegrationTestRSGroup.class); private static boolean initialized = false; @Before diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java index b9cc69d9bfb..826db075522 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java @@ -42,8 +42,6 @@ import org.apache.commons.cli.GnuParser; import org.apache.commons.cli.HelpFormatter; import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.fs.FileSystem; @@ -119,7 +117,8 @@ import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; import org.junit.Test; import org.junit.experimental.categories.Category; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; /** @@ -253,7 +252,7 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase { */ static class Generator extends Configured implements Tool { - private static final Log LOG = LogFactory.getLog(Generator.class); + private static final Logger LOG = LoggerFactory.getLogger(Generator.class); /** * Set this configuration if you want to test single-column family flush works. If set, we will @@ -854,7 +853,7 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase { * WALs and oldWALs dirs (Some of this is TODO). 
*/ static class Search extends Configured implements Tool { - private static final Log LOG = LogFactory.getLog(Search.class); + private static final Logger LOG = LoggerFactory.getLogger(Search.class); protected Job job; private static void printUsage(final String error) { @@ -914,7 +913,7 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase { try { LOG.info("Found cell=" + cell + " , walKey=" + context.getCurrentKey()); } catch (IOException|InterruptedException e) { - LOG.warn(e); + LOG.warn(e.toString(), e); } if (rows.addAndGet(1) < MISSING_ROWS_TO_LOG) { context.getCounter(FOUND_GROUP_KEY, keyStr).increment(1); @@ -1016,7 +1015,7 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase { */ static class Verify extends Configured implements Tool { - private static final Log LOG = LogFactory.getLog(Verify.class); + private static final Logger LOG = LoggerFactory.getLogger(Verify.class); protected static final BytesWritable DEF = new BytesWritable(new byte[] { 0 }); protected static final BytesWritable DEF_LOST_FAMILIES = new BytesWritable(new byte[] { 1 }); @@ -1455,7 +1454,7 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase { */ static class Loop extends Configured implements Tool { - private static final Log LOG = LogFactory.getLog(Loop.class); + private static final Logger LOG = LoggerFactory.getLogger(Loop.class); private static final String USAGE = "Usage: Loop " + " [ " + " ] \n" + diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedListWithVisibility.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedListWithVisibility.java index cdee14ddb74..d0e6e52bdcf 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedListWithVisibility.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedListWithVisibility.java @@ -24,8 +24,6 @@ import java.util.Iterator; import java.util.UUID; import org.apache.commons.cli.CommandLine; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.fs.Path; @@ -50,6 +48,7 @@ import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; import org.apache.hadoop.hbase.io.hfile.HFile; +import org.apache.hadoop.hbase.log.HBaseMarkers; import org.apache.hadoop.hbase.mapreduce.Import; import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil; import org.apache.hadoop.hbase.security.User; @@ -72,6 +71,8 @@ import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * IT test used to verify the deletes with visibility labels. 
@@ -123,7 +124,7 @@ public class IntegrationTestBigLinkedListWithVisibility extends IntegrationTestB private static String userName = "user1"; static class VisibilityGenerator extends Generator { - private static final Log LOG = LogFactory.getLog(VisibilityGenerator.class); + private static final Logger LOG = LoggerFactory.getLogger(VisibilityGenerator.class); @Override protected void createSchema() throws IOException { @@ -162,7 +163,8 @@ public class IntegrationTestBigLinkedListWithVisibility extends IntegrationTestB AccessControlClient.grant(ConnectionFactory.createConnection(getConf()), tableName, USER.getShortName(), null, null, actions); } catch (Throwable e) { - LOG.fatal("Error in granting permission for the user " + USER.getShortName(), e); + LOG.error(HBaseMarkers.FATAL, "Error in granting permission for the user " + + USER.getShortName(), e); throw new IOException(e); } } @@ -239,7 +241,7 @@ public class IntegrationTestBigLinkedListWithVisibility extends IntegrationTestB } static class Copier extends Configured implements Tool { - private static final Log LOG = LogFactory.getLog(Copier.class); + private static final Logger LOG = LoggerFactory.getLogger(Copier.class); private TableName tableName; private int labelIndex; private boolean delete; @@ -395,7 +397,7 @@ public class IntegrationTestBigLinkedListWithVisibility extends IntegrationTestB } static class VisibilityVerify extends Verify { - private static final Log LOG = LogFactory.getLog(VisibilityVerify.class); + private static final Logger LOG = LoggerFactory.getLogger(VisibilityVerify.class); private TableName tableName; private int labelIndex; @@ -475,7 +477,7 @@ public class IntegrationTestBigLinkedListWithVisibility extends IntegrationTestB static class VisibilityLoop extends Loop { private static final int SLEEP_IN_MS = 5000; - private static final Log LOG = LogFactory.getLog(VisibilityLoop.class); + private static final Logger LOG = LoggerFactory.getLogger(VisibilityLoop.class); IntegrationTestBigLinkedListWithVisibility it; @Override diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadAndVerify.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadAndVerify.java index ce86fc2ed06..f47ef5035b3 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadAndVerify.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadAndVerify.java @@ -27,8 +27,6 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; import org.apache.commons.cli.CommandLine; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -84,7 +82,8 @@ import java.util.TreeSet; import java.util.concurrent.atomic.AtomicInteger; import org.junit.Test; import org.junit.experimental.categories.Category; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; /** @@ -107,7 +106,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; @Category(IntegrationTests.class) public class IntegrationTestLoadAndVerify extends IntegrationTestBase { - private static final Log LOG = LogFactory.getLog(IntegrationTestLoadAndVerify.class); + private static final Logger LOG = LoggerFactory.getLogger(IntegrationTestLoadAndVerify.class); private static final String TEST_NAME = "IntegrationTestLoadAndVerify"; private 
static final byte[] TEST_FAMILY = Bytes.toBytes("f1"); @@ -426,7 +425,7 @@ public void cleanUpCluster() throws Exception { try { LOG.info("Found cell=" + cell + " , walKey=" + context.getCurrentKey()); } catch (IOException|InterruptedException e) { - LOG.warn(e); + LOG.warn(e.toString(), e); } if (rows.addAndGet(1) < MISSING_ROWS_TO_LOG) { context.getCounter(FOUND_GROUP_KEY, keyStr).increment(1); diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestReplication.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestReplication.java index 34af01b30b8..333232e7060 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestReplication.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestReplication.java @@ -20,8 +20,6 @@ package org.apache.hadoop.hbase.test; import org.apache.hadoop.hbase.shaded.com.google.common.base.Joiner; import org.apache.commons.cli.CommandLine; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.fs.Path; @@ -38,6 +36,8 @@ import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.replication.ReplicationPeerConfig; import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.util.ArrayList; import java.util.HashMap; @@ -165,7 +165,7 @@ public class IntegrationTestReplication extends IntegrationTestBigLinkedList { * {@link org.apache.hadoop.hbase.test.IntegrationTestBigLinkedList.Loop} */ protected class VerifyReplicationLoop extends Configured implements Tool { - private final Log LOG = LogFactory.getLog(VerifyReplicationLoop.class); + private final Logger LOG = LoggerFactory.getLogger(VerifyReplicationLoop.class); protected ClusterID source; protected ClusterID sink; diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestTimeBoundedRequestsWithRegionReplicas.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestTimeBoundedRequestsWithRegionReplicas.java index 73374238602..52f6566e2a5 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestTimeBoundedRequestsWithRegionReplicas.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestTimeBoundedRequestsWithRegionReplicas.java @@ -27,8 +27,6 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; import org.apache.commons.lang3.RandomUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HRegionLocation; @@ -53,7 +51,8 @@ import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.util.ToolRunner; import org.junit.Assert; import org.junit.experimental.categories.Category; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; /** @@ -95,7 +94,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; @Category(IntegrationTests.class) public class IntegrationTestTimeBoundedRequestsWithRegionReplicas extends IntegrationTestIngest { - private static final Log LOG = LogFactory.getLog( + private static final Logger LOG = LoggerFactory.getLogger( IntegrationTestTimeBoundedRequestsWithRegionReplicas.class); 
private static final String TEST_NAME diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestZKAndFSPermissions.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestZKAndFSPermissions.java index 09cbda3e1b0..6629f89ffc0 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestZKAndFSPermissions.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestZKAndFSPermissions.java @@ -26,8 +26,6 @@ import java.io.IOException; import java.util.List; import org.apache.commons.cli.CommandLine; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -51,6 +49,8 @@ import org.apache.zookeeper.data.ACL; import org.apache.zookeeper.data.Id; import org.apache.zookeeper.data.Stat; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * An integration test which checks that the znodes in zookeeper and data in the FileSystem @@ -68,8 +68,9 @@ import org.junit.experimental.categories.Category; */ @Category(IntegrationTests.class) public class IntegrationTestZKAndFSPermissions extends AbstractHBaseTool { + private static final Logger LOG = + LoggerFactory.getLogger(IntegrationTestZKAndFSPermissions.class); - private static final Log LOG = LogFactory.getLog(IntegrationTestZKAndFSPermissions.class); private String superUser; private String masterPrincipal; private boolean isForce; diff --git a/hbase-mapreduce/pom.xml b/hbase-mapreduce/pom.xml index facdf3fa6a9..e33694d6419 100644 --- a/hbase-mapreduce/pom.xml +++ b/hbase-mapreduce/pom.xml @@ -261,8 +261,8 @@ commons-lang3 - commons-logging - commons-logging + org.slf4j + slf4j-api org.apache.zookeeper diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/HRegionPartitioner.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/HRegionPartitioner.java index 91ef71404da..b0674bf4013 100644 --- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/HRegionPartitioner.java +++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/HRegionPartitioner.java @@ -19,12 +19,11 @@ package org.apache.hadoop.hbase.mapred; import java.io.IOException; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.TableName; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; import org.apache.hadoop.hbase.client.RegionLocator; @@ -44,28 +43,30 @@ import org.apache.hadoop.mapred.Partitioner; @InterfaceAudience.Public public class HRegionPartitioner implements Partitioner { - private static final Log LOG = LogFactory.getLog(HRegionPartitioner.class); + private static final Logger LOG = LoggerFactory.getLogger(HRegionPartitioner.class); // Connection and locator are not cleaned up; they just die when partitioner is done. 
private Connection connection; private RegionLocator locator; private byte[][] startKeys; + @Override public void configure(JobConf job) { try { this.connection = ConnectionFactory.createConnection(HBaseConfiguration.create(job)); TableName tableName = TableName.valueOf(job.get(TableOutputFormat.OUTPUT_TABLE)); this.locator = this.connection.getRegionLocator(tableName); } catch (IOException e) { - LOG.error(e); + LOG.error(e.toString(), e); } try { this.startKeys = this.locator.getStartKeys(); } catch (IOException e) { - LOG.error(e); + LOG.error(e.toString(), e); } } + @Override public int getPartition(ImmutableBytesWritable key, V2 value, int numPartitions) { byte[] region = null; // Only one region return 0 @@ -77,7 +78,7 @@ implements Partitioner { // here if a region splits while mapping region = locator.getRegionLocation(key.get()).getRegionInfo().getStartKey(); } catch (IOException e) { - LOG.error(e); + LOG.error(e.toString(), e); } for (int i = 0; i < this.startKeys.length; i++){ if (Bytes.compareTo(region, this.startKeys[i]) == 0 ){ diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/IdentityTableReduce.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/IdentityTableReduce.java index 3460fe7df80..ba1df4c3a83 100644 --- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/IdentityTableReduce.java +++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/IdentityTableReduce.java @@ -21,9 +21,9 @@ package org.apache.hadoop.hbase.mapred; import java.io.IOException; import java.util.Iterator; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; import org.apache.hadoop.mapred.MapReduceBase; @@ -38,8 +38,8 @@ public class IdentityTableReduce extends MapReduceBase implements TableReduce { @SuppressWarnings("unused") - private static final Log LOG = - LogFactory.getLog(IdentityTableReduce.class.getName()); + private static final Logger LOG = + LoggerFactory.getLogger(IdentityTableReduce.class.getName()); /** * No aggregation, output pairs of (key, record) diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/TableInputFormat.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/TableInputFormat.java index cbd72362eec..d9bb66bdf07 100644 --- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/TableInputFormat.java +++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/TableInputFormat.java @@ -20,9 +20,9 @@ package org.apache.hadoop.hbase.mapred; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Connection; @@ -39,7 +39,7 @@ import org.apache.hadoop.util.StringUtils; @InterfaceAudience.Public public class TableInputFormat extends TableInputFormatBase implements JobConfigurable { - private static final Log LOG = LogFactory.getLog(TableInputFormat.class); + private static final Logger LOG = LoggerFactory.getLogger(TableInputFormat.class); /** * space delimited list of columns diff --git 
a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/TableInputFormatBase.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/TableInputFormatBase.java index 48ee763cf23..509972e92aa 100644 --- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/TableInputFormatBase.java +++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/TableInputFormatBase.java @@ -21,9 +21,9 @@ package org.apache.hadoop.hbase.mapred; import java.io.Closeable; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Connection; @@ -79,7 +79,7 @@ import org.apache.hadoop.mapred.Reporter; @InterfaceAudience.Public public abstract class TableInputFormatBase implements InputFormat { - private static final Log LOG = LogFactory.getLog(TableInputFormatBase.class); + private static final Logger LOG = LoggerFactory.getLogger(TableInputFormatBase.class); private byte [][] inputColumns; private Table table; private RegionLocator regionLocator; diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/TableRecordReaderImpl.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/TableRecordReaderImpl.java index 95be24f1951..a49d0ec5c3e 100644 --- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/TableRecordReaderImpl.java +++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/TableRecordReaderImpl.java @@ -19,10 +19,9 @@ package org.apache.hadoop.hbase.mapred; import java.io.IOException; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; @@ -43,7 +42,7 @@ import static org.apache.hadoop.hbase.mapreduce.TableRecordReaderImpl.LOG_PER_RO */ @InterfaceAudience.Public public class TableRecordReaderImpl { - private static final Log LOG = LogFactory.getLog(TableRecordReaderImpl.class); + private static final Logger LOG = LoggerFactory.getLogger(TableRecordReaderImpl.class); private byte [] startRow; private byte [] endRow; @@ -248,7 +247,7 @@ public class TableRecordReaderImpl { long now = System.currentTimeMillis(); LOG.info("Mapper took " + (now-timestamp) + "ms to process " + rowcount + " rows"); - LOG.info(ioe); + LOG.info(ioe.toString(), ioe); String lastRow = lastSuccessfulRow == null ? 
"null" : Bytes.toStringBinary(lastSuccessfulRow); LOG.info("lastSuccessfulRow=" + lastRow); diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCounter.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCounter.java index 48cc0d5a071..6d6125f033c 100644 --- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCounter.java +++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCounter.java @@ -20,11 +20,11 @@ package org.apache.hadoop.hbase.mapreduce; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.CompareOperator; import org.apache.hadoop.hbase.HConstants; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.fs.Path; @@ -72,8 +72,8 @@ import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; */ @InterfaceAudience.Public public class CellCounter extends Configured implements Tool { - private static final Log LOG = - LogFactory.getLog(CellCounter.class.getName()); + private static final Logger LOG = + LoggerFactory.getLogger(CellCounter.class.getName()); /** diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/CopyTable.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/CopyTable.java index 81af16580c7..2e9e62cf373 100644 --- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/CopyTable.java +++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/CopyTable.java @@ -23,8 +23,6 @@ import java.util.HashMap; import java.util.Map; import java.util.Random; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -32,6 +30,8 @@ import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.TableName; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; @@ -49,7 +49,7 @@ import org.apache.hadoop.util.ToolRunner; */ @InterfaceAudience.Public public class CopyTable extends Configured implements Tool { - private static final Log LOG = LogFactory.getLog(CopyTable.class); + private static final Logger LOG = LoggerFactory.getLogger(CopyTable.class); final static String NAME = "copytable"; long startTime = 0; diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/DefaultVisibilityExpressionResolver.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/DefaultVisibilityExpressionResolver.java index 775739faa8d..07f05dd7980 100644 --- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/DefaultVisibilityExpressionResolver.java +++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/DefaultVisibilityExpressionResolver.java @@ -26,12 +26,12 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import 
org.apache.hadoop.hbase.TableNotFoundException; import org.apache.hadoop.hbase.Tag; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; import org.apache.hadoop.hbase.client.Result; @@ -50,7 +50,8 @@ import org.apache.hadoop.hbase.util.Bytes; */ @InterfaceAudience.Private public class DefaultVisibilityExpressionResolver implements VisibilityExpressionResolver { - private static final Log LOG = LogFactory.getLog(DefaultVisibilityExpressionResolver.class); + private static final Logger LOG = + LoggerFactory.getLogger(DefaultVisibilityExpressionResolver.class); private Configuration conf; private final Map labels = new HashMap<>(); diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/ExportUtils.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/ExportUtils.java index 71075372230..34f33983bd3 100644 --- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/ExportUtils.java +++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/ExportUtils.java @@ -21,14 +21,15 @@ package org.apache.hadoop.hbase.mapreduce; import java.io.IOException; import java.util.Arrays; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; + import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.CompareOperator; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.TableName; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.filter.CompareFilter; import org.apache.hadoop.hbase.filter.Filter; @@ -48,7 +49,7 @@ import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat; */ @InterfaceAudience.Private public final class ExportUtils { - private static final Log LOG = LogFactory.getLog(ExportUtils.class); + private static final Logger LOG = LoggerFactory.getLogger(ExportUtils.class); public static final String RAW_SCAN = "hbase.mapreduce.include.deleted.rows"; public static final String EXPORT_BATCHING = "hbase.export.scanner.batch"; public static final String EXPORT_CACHING = "hbase.export.scanner.caching"; diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java index ffe1c85b1da..9bd05302262 100644 --- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java +++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java @@ -40,8 +40,6 @@ import java.util.function.Function; import java.util.stream.Collectors; import org.apache.commons.lang3.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -91,6 +89,8 @@ import org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter; import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat; import org.apache.hadoop.mapreduce.lib.partition.TotalOrderPartitioner; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** @@ -105,7 +105,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe @InterfaceAudience.Public public class HFileOutputFormat2 extends FileOutputFormat { - private static final Log LOG = LogFactory.getLog(HFileOutputFormat2.class); + private static final Logger LOG = LoggerFactory.getLogger(HFileOutputFormat2.class); static class TableInfo { private TableDescriptor tableDesctiptor; private RegionLocator regionLocator; diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HRegionPartitioner.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HRegionPartitioner.java index d8c2314b22e..b48ecf02a0f 100644 --- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HRegionPartitioner.java +++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HRegionPartitioner.java @@ -19,10 +19,9 @@ package org.apache.hadoop.hbase.mapreduce; import java.io.IOException; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseConfiguration; @@ -53,7 +52,7 @@ public class HRegionPartitioner extends Partitioner implements Configurable { - private static final Log LOG = LogFactory.getLog(HRegionPartitioner.class); + private static final Logger LOG = LoggerFactory.getLogger(HRegionPartitioner.class); private Configuration conf = null; // Connection and locator are not cleaned up; they just die when partitioner is done. private Connection connection; @@ -86,7 +85,7 @@ implements Configurable { // here if a region splits while mapping region = this.locator.getRegionLocation(key.get()).getRegionInfo().getStartKey(); } catch (IOException e) { - LOG.error(e); + LOG.error(e.toString(), e); } for (int i = 0; i < this.startKeys.length; i++){ if (Bytes.compareTo(region, this.startKeys[i]) == 0 ){ @@ -129,12 +128,12 @@ implements Configurable { TableName tableName = TableName.valueOf(conf.get(TableOutputFormat.OUTPUT_TABLE)); this.locator = this.connection.getRegionLocator(tableName); } catch (IOException e) { - LOG.error(e); + LOG.error(e.toString(), e); } try { this.startKeys = this.locator.getStartKeys(); } catch (IOException e) { - LOG.error(e); + LOG.error(e.toString(), e); } } } diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HashTable.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HashTable.java index 2c8caf503a6..e68ac3b3544 100644 --- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HashTable.java +++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HashTable.java @@ -27,8 +27,6 @@ import java.util.Collections; import java.util.List; import java.util.Properties; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.fs.FSDataInputStream; @@ -56,14 +54,15 @@ import org.apache.hadoop.mapreduce.lib.partition.TotalOrderPartitioner; import org.apache.hadoop.util.GenericOptionsParser; import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import 
org.apache.hadoop.hbase.shaded.com.google.common.base.Charsets; import org.apache.hadoop.hbase.shaded.com.google.common.base.Throwables; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Ordering; public class HashTable extends Configured implements Tool { - private static final Log LOG = LogFactory.getLog(HashTable.class); + private static final Logger LOG = LoggerFactory.getLogger(HashTable.class); private static final int DEFAULT_BATCH_SIZE = 8000; diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/IdentityTableReducer.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/IdentityTableReducer.java index 76c1f607be0..876953c862b 100644 --- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/IdentityTableReducer.java +++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/IdentityTableReducer.java @@ -20,9 +20,9 @@ package org.apache.hadoop.hbase.mapreduce; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.client.Mutation; import org.apache.hadoop.io.Writable; @@ -53,7 +53,7 @@ public class IdentityTableReducer extends TableReducer { @SuppressWarnings("unused") - private static final Log LOG = LogFactory.getLog(IdentityTableReducer.class); + private static final Logger LOG = LoggerFactory.getLogger(IdentityTableReducer.class); /** * Writes each given record, consisting of the row key and the given values, diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java index feebca3db00..c77a9d14623 100644 --- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java +++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java @@ -1,4 +1,4 @@ -/** +/* * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file @@ -33,8 +33,6 @@ import java.util.Map; import java.util.TreeMap; import java.util.UUID; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.fs.FileSystem; @@ -79,6 +77,8 @@ import org.apache.hadoop.mapreduce.lib.partition.TotalOrderPartitioner; import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; import org.apache.zookeeper.KeeperException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** @@ -86,7 +86,7 @@ import org.apache.zookeeper.KeeperException; */ @InterfaceAudience.Public public class Import extends Configured implements Tool { - private static final Log LOG = LogFactory.getLog(Import.class); + private static final Logger LOG = LoggerFactory.getLogger(Import.class); final static String NAME = "import"; public final static String CF_RENAME_PROP = "HBASE_IMPORTER_RENAME_CFS"; public final static String BULK_OUTPUT_CONF_KEY = "import.bulk.output"; @@ -287,7 +287,7 @@ public class Import extends Configured implements Tool { extends TableMapper { private Map cfRenameMap; private Filter filter; - private static final Log LOG = LogFactory.getLog(KeyValueSortImporter.class); + private static final Logger LOG = LoggerFactory.getLogger(KeyValueSortImporter.class); /** * @param row The current table row key. 
@@ -352,7 +352,7 @@ public class Import extends Configured implements Tool { public static class KeyValueImporter extends TableMapper { private Map cfRenameMap; private Filter filter; - private static final Log LOG = LogFactory.getLog(KeyValueImporter.class); + private static final Logger LOG = LoggerFactory.getLogger(KeyValueImporter.class); /** * @param row The current table row key. @@ -393,7 +393,7 @@ public class Import extends Configured implements Tool { extends TableMapper { private Map cfRenameMap; private Filter filter; - private static final Log LOG = LogFactory.getLog(CellImporter.class); + private static final Logger LOG = LoggerFactory.getLogger(CellImporter.class); /** * @param row The current table row key. @@ -458,7 +458,7 @@ public class Import extends Configured implements Tool { public static class CellImporter extends TableMapper { private Map cfRenameMap; private Filter filter; - private static final Log LOG = LogFactory.getLog(CellImporter.class); + private static final Logger LOG = LoggerFactory.getLogger(CellImporter.class); /** * @param row The current table row key. diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/ImportTsv.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/ImportTsv.java index d672803b4bc..678377d5822 100644 --- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/ImportTsv.java +++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/ImportTsv.java @@ -27,8 +27,6 @@ import java.util.HashSet; import java.util.Set; import org.apache.commons.lang3.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.fs.Path; @@ -40,6 +38,8 @@ import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.TableNotEnabledException; import org.apache.hadoop.hbase.TableNotFoundException; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; @@ -77,7 +77,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; @InterfaceAudience.Public public class ImportTsv extends Configured implements Tool { - protected static final Log LOG = LogFactory.getLog(ImportTsv.class); + protected static final Logger LOG = LoggerFactory.getLogger(ImportTsv.class); final static String NAME = "importtsv"; diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableHFileOutputFormat.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableHFileOutputFormat.java index b7e94799731..03834f2b8e5 100644 --- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableHFileOutputFormat.java +++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableHFileOutputFormat.java @@ -18,9 +18,9 @@ package org.apache.hadoop.hbase.mapreduce; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.client.TableDescriptor; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; import org.apache.hadoop.hbase.util.Bytes; @@ -47,7 +47,7 @@ import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe @InterfaceAudience.Public @VisibleForTesting public class MultiTableHFileOutputFormat extends HFileOutputFormat2 { - private static final Log LOG = LogFactory.getLog(MultiTableHFileOutputFormat.class); + private static final Logger LOG = LoggerFactory.getLogger(MultiTableHFileOutputFormat.class); /** * Creates a composite key to use as a mapper output key when using diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.java index 82a86b40475..d8205c1a0ca 100644 --- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.java +++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.java @@ -22,9 +22,9 @@ import java.text.MessageFormat; import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HRegionLocation; import org.apache.hadoop.hbase.TableName; @@ -55,7 +55,7 @@ import java.util.Iterator; public abstract class MultiTableInputFormatBase extends InputFormat { - private static final Log LOG = LogFactory.getLog(MultiTableInputFormatBase.class); + private static final Logger LOG = LoggerFactory.getLogger(MultiTableInputFormatBase.class); /** Holds the set of scans used to define the input. */ private List scans; diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableOutputFormat.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableOutputFormat.java index 4cf50f23f1e..2a4fae94409 100644 --- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableOutputFormat.java +++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableOutputFormat.java @@ -22,9 +22,9 @@ import java.io.IOException; import java.util.HashMap; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.TableName; @@ -72,7 +72,7 @@ public class MultiTableOutputFormat extends OutputFormat { - private static final Log LOG = LogFactory.getLog(MultiTableRecordWriter.class); + private static final Logger LOG = LoggerFactory.getLogger(MultiTableRecordWriter.class); Connection connection; Map mutatorMap = new HashMap<>(); Configuration conf; diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableSnapshotInputFormatImpl.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableSnapshotInputFormatImpl.java index b5cba645a80..97fafce99ad 100644 --- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableSnapshotInputFormatImpl.java +++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableSnapshotInputFormatImpl.java @@ -18,22 +18,6 @@ package org.apache.hadoop.hbase.mapreduce; -import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; -import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.HRegionInfo; -import org.apache.yetus.audience.InterfaceAudience; -import org.apache.yetus.audience.InterfaceStability; -import org.apache.hadoop.hbase.client.Scan; -import org.apache.hadoop.hbase.snapshot.RestoreSnapshotHelper; -import org.apache.hadoop.hbase.snapshot.SnapshotManifest; -import org.apache.hadoop.hbase.util.ConfigurationUtil; -import org.apache.hadoop.hbase.util.FSUtils; - import java.io.IOException; import java.util.AbstractMap; import java.util.Collection; @@ -41,6 +25,23 @@ import java.util.List; import java.util.Map; import java.util.UUID; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hbase.HRegionInfo; +import org.apache.hadoop.hbase.client.Scan; +import org.apache.hadoop.hbase.snapshot.RestoreSnapshotHelper; +import org.apache.hadoop.hbase.snapshot.SnapshotManifest; +import org.apache.hadoop.hbase.util.ConfigurationUtil; +import org.apache.hadoop.hbase.util.FSUtils; +import org.apache.yetus.audience.InterfaceAudience; +import org.apache.yetus.audience.InterfaceStability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps; + /** * Shared implementation of mapreduce code over multiple table snapshots. * Utilized by both mapreduce ({@link org.apache.hadoop.hbase.mapreduce @@ -50,8 +51,8 @@ import java.util.UUID; @InterfaceAudience.LimitedPrivate({ "HBase" }) @InterfaceStability.Evolving public class MultiTableSnapshotInputFormatImpl { - - private static final Log LOG = LogFactory.getLog(MultiTableSnapshotInputFormatImpl.class); + private static final Logger LOG = + LoggerFactory.getLogger(MultiTableSnapshotInputFormatImpl.class); public static final String RESTORE_DIRS_KEY = "hbase.MultiTableSnapshotInputFormat.restore.snapshotDirMapping"; diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultithreadedTableMapper.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultithreadedTableMapper.java index a5053796640..626deffda40 100644 --- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultithreadedTableMapper.java +++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultithreadedTableMapper.java @@ -23,8 +23,6 @@ import java.lang.reflect.Method; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; @@ -41,6 +39,8 @@ import org.apache.hadoop.mapreduce.StatusReporter; import org.apache.hadoop.mapreduce.TaskAttemptContext; import org.apache.hadoop.mapreduce.TaskAttemptID; import org.apache.hadoop.util.ReflectionUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** @@ -58,7 +58,7 @@ import org.apache.hadoop.util.ReflectionUtils; */ public class MultithreadedTableMapper extends TableMapper { - private static final Log LOG = LogFactory.getLog(MultithreadedTableMapper.class); + 
private static final Logger LOG = LoggerFactory.getLogger(MultithreadedTableMapper.class); private Class> mapClass; private Context outer; private ExecutorService executor; diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/PutCombiner.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/PutCombiner.java index 7da2f9b3b35..317b328df78 100644 --- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/PutCombiner.java +++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/PutCombiner.java @@ -23,9 +23,9 @@ import java.util.List; import java.util.Map.Entry; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueUtil; @@ -39,7 +39,7 @@ import org.apache.hadoop.mapreduce.Reducer; */ @InterfaceAudience.Public public class PutCombiner extends Reducer { - private static final Log LOG = LogFactory.getLog(PutCombiner.class); + private static final Logger LOG = LoggerFactory.getLogger(PutCombiner.class); @Override protected void reduce(K row, Iterable vals, Context context) diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/RegionSizeCalculator.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/RegionSizeCalculator.java index 33f09cfe007..d1c5fc207fd 100644 --- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/RegionSizeCalculator.java +++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/RegionSizeCalculator.java @@ -24,17 +24,18 @@ import java.util.Map; import java.util.Set; import java.util.TreeMap; -import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HRegionLocation; import org.apache.hadoop.hbase.RegionLoad; import org.apache.hadoop.hbase.ServerName; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.RegionLocator; import org.apache.hadoop.hbase.util.Bytes; +import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; /** * Computes size of each region for given table and given column families. @@ -43,7 +44,7 @@ import org.apache.hadoop.hbase.util.Bytes; @InterfaceAudience.Private public class RegionSizeCalculator { - private static final Log LOG = LogFactory.getLog(RegionSizeCalculator.class); + private static final Logger LOG = LoggerFactory.getLogger(RegionSizeCalculator.class); /** * Maps each region to its size in bytes. 
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/ResultSerialization.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/ResultSerialization.java index c9f3022bd37..dac1d425d80 100644 --- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/ResultSerialization.java +++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/ResultSerialization.java @@ -25,13 +25,13 @@ import java.io.OutputStream; import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.KeyValue; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos; @@ -42,7 +42,7 @@ import org.apache.hadoop.io.serializer.Serializer; @InterfaceAudience.Public public class ResultSerialization extends Configured implements Serialization { - private static final Log LOG = LogFactory.getLog(ResultSerialization.class); + private static final Logger LOG = LoggerFactory.getLogger(ResultSerialization.class); // The following configuration property indicates import file format version. public static final String IMPORT_FORMAT_VER = "hbase.import.version"; diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/RowCounter.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/RowCounter.java index ea89c928ff1..9c7b4891814 100644 --- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/RowCounter.java +++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/RowCounter.java @@ -22,11 +22,11 @@ import java.io.IOException; import java.util.List; import java.util.ArrayList; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.commons.lang3.StringUtils; import org.apache.hadoop.hbase.HConstants; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.hbase.HBaseConfiguration; @@ -50,7 +50,7 @@ import org.apache.hadoop.util.ToolRunner; @InterfaceAudience.Public public class RowCounter extends Configured implements Tool { - private static final Log LOG = LogFactory.getLog(RowCounter.class); + private static final Logger LOG = LoggerFactory.getLogger(RowCounter.class); /** Name of this 'program'. 
*/ static final String NAME = "rowcounter"; diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/SimpleTotalOrderPartitioner.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/SimpleTotalOrderPartitioner.java index ad65e49bc31..1c31eda294e 100644 --- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/SimpleTotalOrderPartitioner.java +++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/SimpleTotalOrderPartitioner.java @@ -18,9 +18,9 @@ */ package org.apache.hadoop.hbase.mapreduce; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; @@ -46,7 +46,7 @@ import org.apache.hadoop.mapreduce.Partitioner; @InterfaceAudience.Public public class SimpleTotalOrderPartitioner extends Partitioner implements Configurable { - private final static Log LOG = LogFactory.getLog(SimpleTotalOrderPartitioner.class); + private final static Logger LOG = LoggerFactory.getLogger(SimpleTotalOrderPartitioner.class); @Deprecated public static final String START = "hbase.simpletotalorder.start"; diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/SyncTable.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/SyncTable.java index edef842f5a5..bc528fcf83c 100644 --- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/SyncTable.java +++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/SyncTable.java @@ -21,8 +21,6 @@ import java.io.IOException; import java.util.Iterator; import java.util.Collections; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.fs.FileStatus; @@ -50,12 +48,13 @@ import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat; import org.apache.hadoop.util.GenericOptionsParser; import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.base.Throwables; public class SyncTable extends Configured implements Tool { - private static final Log LOG = LogFactory.getLog(SyncTable.class); + private static final Logger LOG = LoggerFactory.getLogger(SyncTable.class); static final String SOURCE_HASH_DIR_CONF_KEY = "sync.table.source.hash.dir"; static final String SOURCE_TABLE_CONF_KEY = "sync.table.source.table.name"; diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormat.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormat.java index 9eefac9defe..480c6118b09 100644 --- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormat.java +++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormat.java @@ -23,13 +23,13 @@ import java.util.Collections; import java.util.List; import java.util.Locale; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.TableName; import 
org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; import org.apache.hadoop.hbase.client.RegionLocator; @@ -49,7 +49,7 @@ public class TableInputFormat extends TableInputFormatBase implements Configurable { @SuppressWarnings("hiding") - private static final Log LOG = LogFactory.getLog(TableInputFormat.class); + private static final Logger LOG = LoggerFactory.getLogger(TableInputFormat.class); /** Job parameter that specifies the input table. */ public static final String INPUT_TABLE = "hbase.mapreduce.inputtable"; diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java index fa2e6a2f817..5acbe2c2c12 100644 --- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java +++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java @@ -28,9 +28,9 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HRegionLocation; import org.apache.hadoop.hbase.TableName; @@ -109,7 +109,7 @@ import org.apache.hadoop.util.StringUtils; public abstract class TableInputFormatBase extends InputFormat { - private static final Log LOG = LogFactory.getLog(TableInputFormatBase.class); + private static final Logger LOG = LoggerFactory.getLogger(TableInputFormatBase.class); private static final String NOT_INITIALIZED = "The input format instance has not been properly " + "initialized. Ensure you call initializeTable either in your constructor or initialize " + diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java index dc3bb61a64e..2856a7d608f 100644 --- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java +++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java @@ -33,8 +33,6 @@ import java.util.Set; import java.util.zip.ZipEntry; import java.util.zip.ZipFile; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -43,6 +41,8 @@ import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.MetaTableAccessor; import org.apache.hadoop.hbase.TableName; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; import org.apache.hadoop.hbase.client.Put; @@ -70,7 +70,7 @@ import com.codahale.metrics.MetricRegistry; @SuppressWarnings({ "rawtypes", "unchecked" }) @InterfaceAudience.Public public class TableMapReduceUtil { - private static final Log LOG = LogFactory.getLog(TableMapReduceUtil.class); + private static final Logger LOG = LoggerFactory.getLogger(TableMapReduceUtil.class); /** * Use this before submitting a TableMap job. 
It will appropriately set up diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableOutputFormat.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableOutputFormat.java index 07a2a08cc1c..7598520a481 100644 --- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableOutputFormat.java +++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableOutputFormat.java @@ -19,10 +19,9 @@ package org.apache.hadoop.hbase.mapreduce; import java.io.IOException; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseConfiguration; @@ -52,7 +51,7 @@ import org.apache.hadoop.mapreduce.TaskAttemptContext; public class TableOutputFormat extends OutputFormat implements Configurable { - private static final Log LOG = LogFactory.getLog(TableOutputFormat.class); + private static final Logger LOG = LoggerFactory.getLogger(TableOutputFormat.class); /** Job parameter that specifies the output table. */ public static final String OUTPUT_TABLE = "hbase.mapred.outputtable"; @@ -232,7 +231,7 @@ implements Configurable { this.conf.setInt(HConstants.ZOOKEEPER_CLIENT_PORT, zkClientPort); } } catch(IOException e) { - LOG.error(e); + LOG.error(e.toString(), e); throw new RuntimeException(e); } } diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableRecordReaderImpl.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableRecordReaderImpl.java index 511994b50d7..40c0e7c6278 100644 --- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableRecordReaderImpl.java +++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableRecordReaderImpl.java @@ -20,10 +20,9 @@ package org.apache.hadoop.hbase.mapreduce; import java.io.IOException; import java.lang.reflect.Method; import java.util.Map; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; @@ -50,7 +49,7 @@ public class TableRecordReaderImpl { public static final String LOG_PER_ROW_COUNT = "hbase.mapreduce.log.scanner.rowcount"; - private static final Log LOG = LogFactory.getLog(TableRecordReaderImpl.class); + private static final Logger LOG = LoggerFactory.getLogger(TableRecordReaderImpl.class); // HBASE_COUNTER_GROUP_NAME is the name of mapreduce counter group for HBase @VisibleForTesting @@ -254,7 +253,7 @@ public class TableRecordReaderImpl { long now = System.currentTimeMillis(); LOG.info("Mapper took " + (now-timestamp) + "ms to process " + rowcount + " rows"); - LOG.info(ioe); + LOG.info(ioe.toString(), ioe); String lastRow = lastSuccessfulRow == null ? 
"null" : Bytes.toStringBinary(lastSuccessfulRow); LOG.info("lastSuccessfulRow=" + lastRow); diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatImpl.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatImpl.java index 53eb9f40dca..c447e332fcf 100644 --- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatImpl.java +++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatImpl.java @@ -18,10 +18,14 @@ package org.apache.hadoop.hbase.mapreduce; -import org.apache.hadoop.hbase.client.TableDescriptor; -import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import java.io.ByteArrayOutputStream; +import java.io.DataInput; +import java.io.DataOutput; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.UUID; + import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -29,16 +33,12 @@ import org.apache.hadoop.hbase.HDFSBlocksDistribution; import org.apache.hadoop.hbase.HDFSBlocksDistribution.HostAndWeight; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.PrivateCellUtil; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.client.ClientSideRegionScanner; import org.apache.hadoop.hbase.client.IsolationLevel; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.Scan; +import org.apache.hadoop.hbase.client.TableDescriptor; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; -import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; -import org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.TableSnapshotRegionSplit; -import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription; -import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest; import org.apache.hadoop.hbase.regionserver.HRegion; import org.apache.hadoop.hbase.snapshot.RestoreSnapshotHelper; import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils; @@ -47,14 +47,15 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.FSUtils; import org.apache.hadoop.hbase.util.RegionSplitter; import org.apache.hadoop.io.Writable; +import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; -import java.io.ByteArrayOutputStream; -import java.io.DataInput; -import java.io.DataOutput; -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -import java.util.UUID; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.TableSnapshotRegionSplit; +import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription; +import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest; /** * Hadoop MR API-agnostic implementation for mapreduce over table snapshots. @@ -64,7 +65,7 @@ public class TableSnapshotInputFormatImpl { // TODO: Snapshots files are owned in fs by the hbase user. There is no // easy way to delegate access. 
- public static final Log LOG = LogFactory.getLog(TableSnapshotInputFormatImpl.class); + public static final Logger LOG = LoggerFactory.getLogger(TableSnapshotInputFormatImpl.class); private static final String SNAPSHOT_NAME_KEY = "hbase.TableSnapshotInputFormat.snapshot.name"; // key for specifying the root dir of the restored snapshot diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSplit.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSplit.java index 19614afbada..de42c31678e 100644 --- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSplit.java +++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSplit.java @@ -23,9 +23,9 @@ import java.io.DataOutput; import java.io.IOException; import java.util.Arrays; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.client.Scan; @@ -43,7 +43,7 @@ public class TableSplit extends InputSplit implements Writable, Comparable { /** @deprecated LOG variable would be made private. fix in hbase 3.0 */ @Deprecated - public static final Log LOG = LogFactory.getLog(TableSplit.class); + public static final Logger LOG = LoggerFactory.getLogger(TableSplit.class); // should be < 0 (@see #readFields(DataInput)) // version 1 supports Scan data member diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/WALInputFormat.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/WALInputFormat.java index 796acb925f2..1815412721f 100644 --- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/WALInputFormat.java +++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/WALInputFormat.java @@ -26,8 +26,6 @@ import java.util.ArrayList; import java.util.Collections; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -35,6 +33,8 @@ import org.apache.hadoop.fs.LocatedFileStatus; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.RemoteIterator; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.wal.WALEdit; import org.apache.hadoop.hbase.wal.AbstractFSWALProvider; import org.apache.hadoop.hbase.wal.WAL; @@ -55,7 +55,7 @@ import org.apache.hadoop.util.StringUtils; */ @InterfaceAudience.Public public class WALInputFormat extends InputFormat { - private static final Log LOG = LogFactory.getLog(WALInputFormat.class); + private static final Logger LOG = LoggerFactory.getLogger(WALInputFormat.class); public static final String START_TIME_KEY = "wal.start.time"; public static final String END_TIME_KEY = "wal.end.time"; diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/WALPlayer.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/WALPlayer.java index eff50ced97c..f4dfba58521 100644 --- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/WALPlayer.java +++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/WALPlayer.java @@ -23,8 +23,6 @@ import java.text.SimpleDateFormat; import java.util.Map; import 
java.util.TreeMap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.fs.Path; @@ -54,6 +52,8 @@ import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat; import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A tool to replay WAL files as a M/R job. @@ -67,7 +67,7 @@ import org.apache.yetus.audience.InterfaceAudience; */ @InterfaceAudience.Public public class WALPlayer extends Configured implements Tool { - private static final Log LOG = LogFactory.getLog(WALPlayer.class); + private static final Logger LOG = LoggerFactory.getLogger(WALPlayer.class); final static String NAME = "WALPlayer"; public final static String BULK_OUTPUT_CONF_KEY = "wal.bulk.output"; public final static String TABLES_KEY = "wal.input.tables"; diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/replication/VerifyReplication.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/replication/VerifyReplication.java index b8de9ec088f..01df2bd6d3a 100644 --- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/replication/VerifyReplication.java +++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/replication/VerifyReplication.java @@ -21,8 +21,6 @@ package org.apache.hadoop.hbase.mapreduce.replication; import java.io.IOException; import java.util.Arrays; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.fs.FileSystem; @@ -64,7 +62,8 @@ import org.apache.hadoop.mapreduce.MRJobConfig; import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat; import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** @@ -79,8 +78,8 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe */ public class VerifyReplication extends Configured implements Tool { - private static final Log LOG = - LogFactory.getLog(VerifyReplication.class); + private static final Logger LOG = + LoggerFactory.getLogger(VerifyReplication.class); public final static String NAME = "verifyrep"; private final static String PEER_CONFIG_PREFIX = NAME + ".peer."; diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/regionserver/CompactionTool.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/regionserver/CompactionTool.java index aec5fa08b98..746bb5ff82c 100644 --- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/regionserver/CompactionTool.java +++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/regionserver/CompactionTool.java @@ -27,8 +27,6 @@ import java.util.List; import java.util.Optional; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.fs.FSDataOutputStream; @@ -63,6 +61,8 @@ import org.apache.hadoop.util.LineReader; import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; import org.apache.yetus.audience.InterfaceAudience; +import 
org.slf4j.Logger; +import org.slf4j.LoggerFactory; /* * The CompactionTool allows to execute a compaction specifying a: @@ -74,7 +74,7 @@ import org.apache.yetus.audience.InterfaceAudience; */ @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS) public class CompactionTool extends Configured implements Tool { - private static final Log LOG = LogFactory.getLog(CompactionTool.class); + private static final Logger LOG = LoggerFactory.getLogger(CompactionTool.class); private final static String CONF_TMP_DIR = "hbase.tmp.dir"; private final static String CONF_COMPACT_ONCE = "hbase.compactiontool.compact.once"; diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java index 462d6bc37a9..66e9e3bfc9b 100644 --- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java +++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java @@ -32,8 +32,6 @@ import java.util.List; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.Option; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; @@ -73,7 +71,8 @@ import org.apache.hadoop.mapreduce.security.TokenCache; import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.util.Tool; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription; import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo; import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest; @@ -93,7 +92,7 @@ public class ExportSnapshot extends AbstractHBaseTool implements Tool { /** Configuration prefix for overrides for the destination filesystem */ public static final String CONF_DEST_PREFIX = NAME + ".to."; - private static final Log LOG = LogFactory.getLog(ExportSnapshot.class); + private static final Logger LOG = LoggerFactory.getLogger(ExportSnapshot.class); private static final String MR_NUM_MAPS = "mapreduce.job.maps"; private static final String CONF_NUM_SPLITS = "snapshot.export.format.splits"; @@ -153,7 +152,7 @@ public class ExportSnapshot extends AbstractHBaseTool implements Tool { private static class ExportMapper extends Mapper { - private static final Log LOG = LogFactory.getLog(ExportMapper.class); + private static final Logger LOG = LoggerFactory.getLogger(ExportMapper.class); final static int REPORT_SIZE = 1 * 1024 * 1024; final static int BUFFER_SIZE = 64 * 1024; diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java index 4f8b82f4fac..a86c29f044f 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java @@ -47,8 +47,6 @@ import java.util.concurrent.Executors; import java.util.concurrent.Future; import org.apache.commons.lang3.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; import 
org.apache.hadoop.fs.FileSystem; @@ -106,7 +104,8 @@ import org.apache.htrace.core.ProbabilitySampler; import org.apache.htrace.core.Sampler; import org.apache.htrace.core.TraceScope; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.base.MoreObjects; import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder; @@ -131,7 +130,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFa public class PerformanceEvaluation extends Configured implements Tool { static final String RANDOM_SEEK_SCAN = "randomSeekScan"; static final String RANDOM_READ = "randomRead"; - private static final Log LOG = LogFactory.getLog(PerformanceEvaluation.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(PerformanceEvaluation.class.getName()); private static final ObjectMapper MAPPER = new ObjectMapper(); static { MAPPER.configure(MapperFeature.SORT_PROPERTIES_ALPHABETICALLY, true); @@ -361,7 +360,8 @@ public class PerformanceEvaluation extends Configured implements Tool { .add("desc", desc) .add("presplit", opts.presplitRegions) .add("splitPolicy", opts.splitPolicy) - .add("replicas", opts.replicas)); + .add("replicas", opts.replicas) + .toString()); } // remove an existing table @@ -1989,7 +1989,7 @@ public class PerformanceEvaluation extends Configured implements Tool { } static class FilteredScanTest extends TableTest { - protected static final Log LOG = LogFactory.getLog(FilteredScanTest.class.getName()); + protected static final Logger LOG = LoggerFactory.getLogger(FilteredScanTest.class.getName()); FilteredScanTest(Connection con, TestOptions options, Status status) { super(con, options, status); diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestMultiTableSnapshotInputFormat.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestMultiTableSnapshotInputFormat.java index 665c5470197..1b137792fb4 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestMultiTableSnapshotInputFormat.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestMultiTableSnapshotInputFormat.java @@ -18,9 +18,12 @@ package org.apache.hadoop.hbase.mapred; -import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import static org.junit.Assert.assertTrue; + +import java.io.IOException; +import java.util.Iterator; +import java.util.List; + import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.client.Result; @@ -36,18 +39,17 @@ import org.apache.hadoop.mapred.OutputCollector; import org.apache.hadoop.mapred.Reporter; import org.apache.hadoop.mapred.RunningJob; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; -import java.io.IOException; -import java.util.Iterator; -import java.util.List; - -import static org.junit.Assert.assertTrue; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; @Category({ VerySlowMapReduceTests.class, LargeTests.class }) public class TestMultiTableSnapshotInputFormat extends org.apache.hadoop.hbase.mapreduce.TestMultiTableSnapshotInputFormat { - private static final Log LOG = LogFactory.getLog(TestMultiTableSnapshotInputFormat.class); + private static final Logger LOG = + 
LoggerFactory.getLogger(TestMultiTableSnapshotInputFormat.class); @Override protected void runJob(String jobName, Configuration c, List scans) diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableInputFormat.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableInputFormat.java index ace2ffab409..369f1c11745 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableInputFormat.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableInputFormat.java @@ -32,8 +32,6 @@ import java.io.IOException; import java.util.Arrays; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.*; import org.apache.hadoop.hbase.Cell; @@ -68,6 +66,8 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This tests the TableInputFormat and its recovery semantics @@ -75,7 +75,7 @@ import org.mockito.stubbing.Answer; @Category({MapReduceTests.class, LargeTests.class}) public class TestTableInputFormat { - private static final Log LOG = LogFactory.getLog(TestTableInputFormat.class); + private static final Logger LOG = LoggerFactory.getLogger(TestTableInputFormat.class); private final static HBaseTestingUtility UTIL = new HBaseTestingUtility(); diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduce.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduce.java index 3f905cffeae..d300e7d7451 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduce.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduce.java @@ -23,8 +23,6 @@ import static org.junit.Assert.assertTrue; import java.io.File; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.testclassification.LargeTests; @@ -41,6 +39,8 @@ import org.apache.hadoop.mapred.OutputCollector; import org.apache.hadoop.mapred.Reporter; import org.apache.hadoop.mapred.RunningJob; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Test Map/Reduce job over HBase tables. 
The map/reduce process we're testing @@ -50,10 +50,10 @@ import org.junit.experimental.categories.Category; @Category({MapReduceTests.class, LargeTests.class}) @SuppressWarnings("deprecation") public class TestTableMapReduce extends TestTableMapReduceBase { - private static final Log LOG = - LogFactory.getLog(TestTableMapReduce.class.getName()); + private static final Logger LOG = + LoggerFactory.getLogger(TestTableMapReduce.class.getName()); - protected Log getLog() { return LOG; } + protected Logger getLog() { return LOG; } /** * Pass the given key and processed record reduce diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduceUtil.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduceUtil.java index ac2f20d895b..4a601106482 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduceUtil.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduceUtil.java @@ -28,8 +28,6 @@ import java.util.Iterator; import java.util.List; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.hbase.HBaseTestingUtility; @@ -53,15 +51,16 @@ import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableMap; import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableSet; @Category({MapReduceTests.class, LargeTests.class}) public class TestTableMapReduceUtil { - private static final Log LOG = LogFactory - .getLog(TestTableMapReduceUtil.class); + private static final Logger LOG = LoggerFactory + .getLogger(TestTableMapReduceUtil.class); private static Table presidentsTable; private static final String TABLE_NAME = "People"; diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableOutputFormatConnectionExhaust.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableOutputFormatConnectionExhaust.java index 835117c020a..785380f5a0a 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableOutputFormatConnectionExhaust.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableOutputFormatConnectionExhaust.java @@ -17,8 +17,6 @@ */ package org.apache.hadoop.hbase.mapred; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; @@ -30,6 +28,8 @@ import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.io.IOException; @@ -44,8 +44,8 @@ import static org.junit.Assert.fail; @Category(MediumTests.class) public class TestTableOutputFormatConnectionExhaust { - private static final Log LOG = - LogFactory.getLog(TestTableOutputFormatConnectionExhaust.class); + private static final Logger LOG = + LoggerFactory.getLogger(TestTableOutputFormatConnectionExhaust.class); private final static HBaseTestingUtility UTIL = new HBaseTestingUtility(); static final String TABLE = "TestTableOutputFormatConnectionExhaust"; 
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatTestBase.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatTestBase.java index c717fa96a99..b8d03bac529 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatTestBase.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatTestBase.java @@ -18,9 +18,17 @@ package org.apache.hadoop.hbase.mapreduce; -import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +import java.io.File; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.NavigableMap; + import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.fs.Path; @@ -42,17 +50,10 @@ import org.junit.BeforeClass; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TestRule; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; -import java.io.File; -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -import java.util.Locale; -import java.util.Map; -import java.util.NavigableMap; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; /** * Base set of tests and setup for input formats touching multiple tables. @@ -60,7 +61,7 @@ import static org.junit.Assert.assertTrue; public abstract class MultiTableInputFormatTestBase { @Rule public final TestRule timeout = CategoryBasedTimeout.builder(). 
withTimeout(this.getClass()).withLookingForStuckThread(true).build(); - static final Log LOG = LogFactory.getLog(TestMultiTableInputFormat.class); + static final Logger LOG = LoggerFactory.getLogger(TestMultiTableInputFormat.class); public static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); static final String TABLE_NAME = "scantest"; static final byte[] INPUT_FAMILY = Bytes.toBytes("contents"); diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatTestBase.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatTestBase.java index 4e11275b232..d753d40add9 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatTestBase.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatTestBase.java @@ -18,8 +18,6 @@ package org.apache.hadoop.hbase.mapreduce; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -44,6 +42,8 @@ import org.junit.Assert; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TestRule; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.junit.Assert.assertFalse; @@ -51,7 +51,7 @@ import java.io.IOException; import java.util.Arrays; public abstract class TableSnapshotInputFormatTestBase { - private static final Log LOG = LogFactory.getLog(TableSnapshotInputFormatTestBase.class); + private static final Logger LOG = LoggerFactory.getLogger(TableSnapshotInputFormatTestBase.class); @Rule public final TestRule timeout = CategoryBasedTimeout.builder(). withTimeout(this.getClass()).withLookingForStuckThread(true).build(); protected final HBaseTestingUtility UTIL = new HBaseTestingUtility(); diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java index 5253ea8e3f9..6b30c9cd76f 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java @@ -40,8 +40,6 @@ import java.util.concurrent.Callable; import java.util.stream.Collectors; import java.util.stream.Stream; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -110,6 +108,8 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestRule; import org.mockito.Mockito; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Simple test for {@link HFileOutputFormat2}. @@ -132,7 +132,7 @@ public class TestHFileOutputFormat2 { private HBaseTestingUtility util = new HBaseTestingUtility(); - private static final Log LOG = LogFactory.getLog(TestHFileOutputFormat2.class); + private static final Logger LOG = LoggerFactory.getLogger(TestHFileOutputFormat2.class); /** * Simple mapper that makes KeyValue output. 
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHashTable.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHashTable.java index 87e7852b635..6b3c71ce2b1 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHashTable.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHashTable.java @@ -22,8 +22,6 @@ import static org.junit.Assert.assertEquals; import java.util.HashMap; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -45,6 +43,8 @@ import org.junit.experimental.categories.Category; import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableMap; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Basic test for the HashTable M/R tool @@ -52,7 +52,7 @@ import org.junit.rules.TestName; @Category(LargeTests.class) public class TestHashTable { - private static final Log LOG = LogFactory.getLog(TestHashTable.class); + private static final Logger LOG = LoggerFactory.getLogger(TestHashTable.class); private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java index 22bfb582f17..65d53f3cd3b 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java @@ -35,8 +35,6 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -87,6 +85,8 @@ import org.junit.experimental.categories.Category; import org.junit.rules.TestName; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Tests the table import and table export MR job functionality @@ -95,7 +95,7 @@ import org.mockito.stubbing.Answer; //TODO : Remove this in 3.0 public class TestImportExport { - private static final Log LOG = LogFactory.getLog(TestImportExport.class); + private static final Logger LOG = LoggerFactory.getLogger(TestImportExport.class); protected static final HBaseTestingUtility UTIL = new HBaseTestingUtility(); private static final byte[] ROW1 = Bytes.toBytesBinary("\\x32row1"); private static final byte[] ROW2 = Bytes.toBytesBinary("\\x32row2"); diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithOperationAttributes.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithOperationAttributes.java index a47bef1dcd0..5d4c8a3ad70 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithOperationAttributes.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithOperationAttributes.java @@ -27,8 +27,6 @@ import java.util.List; import java.util.Optional; import java.util.UUID; -import org.apache.commons.logging.Log; -import 
org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataOutputStream; @@ -66,12 +64,15 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; import org.junit.rules.TestRule; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({MapReduceTests.class, LargeTests.class}) public class TestImportTSVWithOperationAttributes implements Configurable { @Rule public final TestRule timeout = CategoryBasedTimeout.builder(). withTimeout(this.getClass()).withLookingForStuckThread(true).build(); - private static final Log LOG = LogFactory.getLog(TestImportTSVWithOperationAttributes.class); + private static final Logger LOG = + LoggerFactory.getLogger(TestImportTSVWithOperationAttributes.class); protected static final String NAME = TestImportTsv.class.getSimpleName(); protected static HBaseTestingUtility util = new HBaseTestingUtility(); @@ -95,10 +96,12 @@ public class TestImportTSVWithOperationAttributes implements Configurable { @Rule public TestName name = new TestName(); + @Override public Configuration getConf() { return util.getConfiguration(); } + @Override public void setConf(Configuration conf) { throw new IllegalArgumentException("setConf not supported"); } diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithTTLs.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithTTLs.java index f121f20a0f8..9ddbc65dda0 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithTTLs.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithTTLs.java @@ -26,8 +26,6 @@ import java.util.List; import java.util.Optional; import java.util.UUID; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataOutputStream; @@ -54,11 +52,13 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({MapReduceTests.class, LargeTests.class}) public class TestImportTSVWithTTLs implements Configurable { - protected static final Log LOG = LogFactory.getLog(TestImportTSVWithTTLs.class); + protected static final Logger LOG = LoggerFactory.getLogger(TestImportTSVWithTTLs.class); protected static final String NAME = TestImportTsv.class.getSimpleName(); protected static HBaseTestingUtility util = new HBaseTestingUtility(); diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithVisibilityLabels.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithVisibilityLabels.java index 469284733fc..8d3f3df1064 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithVisibilityLabels.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithVisibilityLabels.java @@ -29,8 +29,6 @@ import java.util.List; import java.util.Set; import java.util.UUID; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataOutputStream; @@ -74,11 +72,14 @@ import 
org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({MapReduceTests.class, LargeTests.class}) public class TestImportTSVWithVisibilityLabels implements Configurable { - private static final Log LOG = LogFactory.getLog(TestImportTSVWithVisibilityLabels.class); + private static final Logger LOG = + LoggerFactory.getLogger(TestImportTSVWithVisibilityLabels.class); protected static final String NAME = TestImportTsv.class.getSimpleName(); protected static HBaseTestingUtility util = new HBaseTestingUtility(); diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java index f6fcfa38ec8..9484a94a395 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java @@ -32,8 +32,6 @@ import java.util.Map; import java.util.Set; import java.util.UUID; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataOutputStream; @@ -71,11 +69,13 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.ExpectedException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({VerySlowMapReduceTests.class, LargeTests.class}) public class TestImportTsv implements Configurable { - private static final Log LOG = LogFactory.getLog(TestImportTsv.class); + private static final Logger LOG = LoggerFactory.getLogger(TestImportTsv.class); protected static final String NAME = TestImportTsv.class.getSimpleName(); protected static HBaseTestingUtility util = new HBaseTestingUtility(); diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestJarFinder.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestJarFinder.java index 8187b73aa46..7eeee707ea2 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestJarFinder.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestJarFinder.java @@ -18,11 +18,11 @@ package org.apache.hadoop.hbase.mapreduce; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.junit.Assert; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.LoggerFactory; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; @@ -48,7 +48,7 @@ public class TestJarFinder { public void testJar() throws Exception { //picking a class that is for sure in a JAR in the classpath - String jar = JarFinder.getJar(LogFactory.class); + String jar = JarFinder.getJar(LoggerFactory.class); Assert.assertTrue(new File(jar).exists()); } diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultiTableSnapshotInputFormat.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultiTableSnapshotInputFormat.java index 530d9c57e3c..354f5e77ec8 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultiTableSnapshotInputFormat.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultiTableSnapshotInputFormat.java @@ -18,10 +18,11 
@@ package org.apache.hadoop.hbase.mapreduce; -import org.apache.hadoop.hbase.shaded.com.google.common.base.Function; -import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableList; -import org.apache.hadoop.hbase.shaded.com.google.common.collect.Multimaps; -import edu.umd.cs.findbugs.annotations.Nullable; +import java.io.IOException; +import java.util.Collection; +import java.util.List; +import java.util.Map; + import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Scan; @@ -36,10 +37,11 @@ import org.junit.Before; import org.junit.BeforeClass; import org.junit.experimental.categories.Category; -import java.io.IOException; -import java.util.Collection; -import java.util.List; -import java.util.Map; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Function; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableList; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Multimaps; + +import edu.umd.cs.findbugs.annotations.Nullable; @Category({ VerySlowMapReduceTests.class, LargeTests.class }) public class TestMultiTableSnapshotInputFormat extends MultiTableInputFormatTestBase { diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultithreadedTableMapper.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultithreadedTableMapper.java index 694a359a5a3..357f3750ef5 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultithreadedTableMapper.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultithreadedTableMapper.java @@ -26,8 +26,6 @@ import java.util.Iterator; import java.util.Map; import java.util.NavigableMap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HConstants; @@ -50,6 +48,8 @@ import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Test Map/Reduce job over HBase tables. 
The map/reduce process we're testing @@ -58,7 +58,7 @@ import org.junit.experimental.categories.Category; */ @Category({MapReduceTests.class, LargeTests.class}) public class TestMultithreadedTableMapper { - private static final Log LOG = LogFactory.getLog(TestMultithreadedTableMapper.class); + private static final Logger LOG = LoggerFactory.getLogger(TestMultithreadedTableMapper.class); private static final HBaseTestingUtility UTIL = new HBaseTestingUtility(); static final TableName MULTI_REGION_TABLE_NAME = TableName.valueOf("mrtest"); diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestRowCounter.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestRowCounter.java index 3b84e2d2d34..aba17147537 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestRowCounter.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestRowCounter.java @@ -27,8 +27,6 @@ import java.io.IOException; import java.io.PrintStream; import java.util.ArrayList; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.CategoryBasedTimeout; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.TableName; @@ -46,6 +44,8 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestRule; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Test the rowcounter map reduce job. @@ -54,7 +54,7 @@ import org.junit.rules.TestRule; public class TestRowCounter { @Rule public final TestRule timeout = CategoryBasedTimeout.builder(). withTimeout(this.getClass()).withLookingForStuckThread(true).build(); - private static final Log LOG = LogFactory.getLog(TestRowCounter.class); + private static final Logger LOG = LoggerFactory.getLogger(TestRowCounter.class); private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private final static String TABLE_NAME = "testRowCounter"; private final static String TABLE_NAME_TS_RANGE = "testRowCounter_ts_range"; diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSyncTable.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSyncTable.java index 1e940d46cfd..e2a04241a59 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSyncTable.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSyncTable.java @@ -22,8 +22,6 @@ import static org.junit.Assert.assertEquals; import java.io.IOException; import java.util.Arrays; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.CategoryBasedTimeout; @@ -48,7 +46,8 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; import org.junit.rules.TestRule; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.base.Throwables; /** @@ -58,7 +57,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.base.Throwables; public class TestSyncTable { @Rule public final TestRule timeout = CategoryBasedTimeout.builder(). 
withTimeout(this.getClass()).withLookingForStuckThread(true).build(); - private static final Log LOG = LogFactory.getLog(TestSyncTable.class); + private static final Logger LOG = LoggerFactory.getLogger(TestSyncTable.class); private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormat.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormat.java index db50899cfb6..5453054373f 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormat.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormat.java @@ -32,8 +32,6 @@ import java.io.IOException; import java.util.Arrays; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.*; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; @@ -64,6 +62,8 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This tests the TableInputFormat and its recovery semantics @@ -72,7 +72,7 @@ import org.mockito.stubbing.Answer; @Category(LargeTests.class) public class TestTableInputFormat { - private static final Log LOG = LogFactory.getLog(TestTableInputFormat.class); + private static final Logger LOG = LoggerFactory.getLogger(TestTableInputFormat.class); private final static HBaseTestingUtility UTIL = new HBaseTestingUtility(); private static MiniMRCluster mrCluster; diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanBase.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanBase.java index d127adb22f8..3d970713790 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanBase.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanBase.java @@ -25,8 +25,6 @@ import static org.junit.Assert.assertTrue; import java.io.IOException; import java.util.*; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HBaseTestingUtility; @@ -47,6 +45,8 @@ import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat; import org.junit.AfterClass; import org.junit.Assert; import org.junit.BeforeClass; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** @@ -62,7 +62,7 @@ import org.junit.BeforeClass; */ public abstract class TestTableInputFormatScanBase { - private static final Log LOG = LogFactory.getLog(TestTableInputFormatScanBase.class); + private static final Logger LOG = LoggerFactory.getLogger(TestTableInputFormatScanBase.class); static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); static final TableName TABLE_NAME = TableName.valueOf("scantest"); diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduce.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduce.java index d702e0d3c90..9c38a0ddf3a 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduce.java +++ 
b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduce.java @@ -27,8 +27,6 @@ import java.io.IOException; import java.util.Map; import java.util.NavigableMap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.TableName; @@ -49,6 +47,8 @@ import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Test Map/Reduce job over HBase tables. The map/reduce process we're testing @@ -58,10 +58,10 @@ import org.junit.experimental.categories.Category; @Category({VerySlowMapReduceTests.class, LargeTests.class}) public class TestTableMapReduce extends TestTableMapReduceBase { - private static final Log LOG = LogFactory.getLog(TestTableMapReduce.class); + private static final Logger LOG = LoggerFactory.getLogger(TestTableMapReduce.class); @Override - protected Log getLog() { return LOG; } + protected Logger getLog() { return LOG; } /** * Pass the given key and processed record reduce diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduceBase.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduceBase.java index 27bf0637ecb..60e2622856c 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduceBase.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduceBase.java @@ -26,7 +26,6 @@ import java.util.Iterator; import java.util.Map; import java.util.NavigableMap; -import org.apache.commons.logging.Log; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.CategoryBasedTimeout; import org.apache.hadoop.hbase.Cell; @@ -46,6 +45,7 @@ import org.junit.BeforeClass; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TestRule; +import org.slf4j.Logger; /** * A base class for a test Map/Reduce job over HBase tables. The map/reduce process we're testing @@ -70,7 +70,7 @@ public abstract class TestTableMapReduceBase { /** * Retrieve my logger instance. */ - protected abstract Log getLog(); + protected abstract Logger getLog(); /** * Handles API-specifics for setting up and executing the job. 
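For reference, every mapreduce test touched above follows the same mechanical rewrite: the commons-logging Log/LogFactory pair becomes the slf4j Logger/LoggerFactory pair, and code that exposes the logger through an accessor (TestTableMapReduceBase#getLog and its overrides) changes the return type to Logger. A minimal sketch of the resulting shape, assuming only slf4j-api on the classpath; the class name ExampleMapReduceTest is hypothetical and does not appear in the patch:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class ExampleMapReduceTest {
      // was: private static final Log LOG = LogFactory.getLog(ExampleMapReduceTest.class);
      private static final Logger LOG = LoggerFactory.getLogger(ExampleMapReduceTest.class);

      // was: protected Log getLog() { return LOG; }
      protected Logger getLog() {
        return LOG;
      }
    }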
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableSnapshotInputFormat.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableSnapshotInputFormat.java index 2ed6081e31b..11b7657fc3f 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableSnapshotInputFormat.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableSnapshotInputFormat.java @@ -27,8 +27,6 @@ import static org.mockito.Mockito.when; import java.io.IOException; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.CategoryBasedTimeout; @@ -58,7 +56,8 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; import org.junit.rules.TestRule; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import java.util.Arrays; @@ -71,7 +70,7 @@ import org.apache.hadoop.hbase.util.RegionSplitter; @Category({VerySlowMapReduceTests.class, LargeTests.class}) public class TestTableSnapshotInputFormat extends TableSnapshotInputFormatTestBase { - private static final Log LOG = LogFactory.getLog(TestTableSnapshotInputFormat.class); + private static final Logger LOG = LoggerFactory.getLogger(TestTableSnapshotInputFormat.class); @Rule public final TestRule timeout = CategoryBasedTimeout.builder(). withTimeout(this.getClass()).withLookingForStuckThread(true).build(); diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTimeRangeMapRed.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTimeRangeMapRed.java index 6796c944adc..3f0c591f2f0 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTimeRangeMapRed.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTimeRangeMapRed.java @@ -18,8 +18,6 @@ */ package org.apache.hadoop.hbase.mapreduce; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileUtil; @@ -51,6 +49,8 @@ import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.io.File; import java.io.IOException; @@ -62,7 +62,7 @@ import java.util.TreeMap; @Category({MapReduceTests.class, LargeTests.class}) public class TestTimeRangeMapRed { - private final static Log log = LogFactory.getLog(TestTimeRangeMapRed.class); + private final static Logger log = LoggerFactory.getLogger(TestTimeRangeMapRed.class); private static final HBaseTestingUtility UTIL = new HBaseTestingUtility(); private Admin admin; diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALRecordReader.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALRecordReader.java index 65a34214619..18bb1353ee6 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALRecordReader.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALRecordReader.java @@ -26,8 +26,6 @@ import java.util.NavigableMap; import java.util.TreeMap; import java.util.concurrent.atomic.AtomicLong; -import 
org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -57,13 +55,15 @@ import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * JUnit tests for the WALRecordReader */ @Category({MapReduceTests.class, MediumTests.class}) public class TestWALRecordReader { - private static final Log LOG = LogFactory.getLog(TestWALRecordReader.class); + private static final Logger LOG = LoggerFactory.getLogger(TestWALRecordReader.class); private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private static Configuration conf; private static FileSystem fs; diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java index 143f585dbdd..8aefa4d1693 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java @@ -30,8 +30,6 @@ import java.util.List; import java.util.NavigableMap; import java.util.TreeMap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -84,14 +82,15 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos; @Category({ReplicationTests.class, LargeTests.class}) public class TestReplicationSmallTests extends TestReplicationBase { - private static final Log LOG = LogFactory.getLog(TestReplicationSmallTests.class); + private static final Logger LOG = LoggerFactory.getLogger(TestReplicationSmallTests.class); private static final String PEER_ID = "2"; @Rule diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshot.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshot.java index 98d6311b8f4..8703ca02065 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshot.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshot.java @@ -28,10 +28,9 @@ import java.net.URI; import java.util.ArrayList; import java.util.HashSet; import java.util.List; +import java.util.Objects; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -51,13 +50,13 @@ import org.junit.After; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; -import org.junit.Ignore; import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; import org.junit.rules.TestRule; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import 
org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription; import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest; @@ -68,7 +67,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.Snapshot public class TestExportSnapshot { @Rule public final TestRule timeout = CategoryBasedTimeout.builder(). withTimeout(this.getClass()).withLookingForStuckThread(true).build(); - private static final Log LOG = LogFactory.getLog(TestExportSnapshot.class); + private static final Logger LOG = LoggerFactory.getLogger(TestExportSnapshot.class); protected final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); @@ -350,7 +349,7 @@ public class TestExportSnapshot { FileStatus[] list = FSUtils.listStatus(fs, dir); if (list != null) { for (FileStatus fstat: list) { - LOG.debug(fstat.getPath()); + LOG.debug(Objects.toString(fstat.getPath())); if (fstat.isDirectory()) { files.addAll(listFiles(fs, root, fstat.getPath())); } else { diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshotNoCluster.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshotNoCluster.java index 00778502349..f3d08ba52f7 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshotNoCluster.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshotNoCluster.java @@ -18,8 +18,6 @@ package org.apache.hadoop.hbase.snapshot; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -37,6 +35,8 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestRule; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Test Export Snapshot Tool @@ -45,7 +45,7 @@ import org.junit.rules.TestRule; public class TestExportSnapshotNoCluster { @Rule public final TestRule timeout = CategoryBasedTimeout.builder(). 
withTimeout(this.getClass()).withLookingForStuckThread(true).build(); - private static final Log LOG = LogFactory.getLog(TestExportSnapshotNoCluster.class); + private static final Logger LOG = LoggerFactory.getLogger(TestExportSnapshotNoCluster.class); protected final static HBaseCommonTestingUtility TEST_UTIL = new HBaseCommonTestingUtility(); diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java index 1d9b74e9898..93a4798eafa 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java @@ -29,8 +29,6 @@ import java.util.concurrent.atomic.AtomicReference; import javax.crypto.spec.SecretKeySpec; import org.apache.commons.cli.CommandLine; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseInterfaceAudience; @@ -40,6 +38,8 @@ import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor; import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; @@ -49,6 +49,7 @@ import org.apache.hadoop.hbase.io.compress.Compression; import org.apache.hadoop.hbase.io.crypto.Cipher; import org.apache.hadoop.hbase.io.crypto.Encryption; import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; +import org.apache.hadoop.hbase.log.HBaseMarkers; import org.apache.hadoop.hbase.regionserver.BloomType; import org.apache.hadoop.hbase.security.EncryptionUtil; import org.apache.hadoop.hbase.security.HBaseKerberosUtils; @@ -67,7 +68,7 @@ import org.apache.hadoop.util.ToolRunner; @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS) public class LoadTestTool extends AbstractHBaseTool { - private static final Log LOG = LogFactory.getLog(LoadTestTool.class); + private static final Logger LOG = LoggerFactory.getLogger(LoadTestTool.class); private static final String COLON = ":"; /** Table name for the test */ @@ -579,7 +580,7 @@ public class LoadTestTool extends AbstractHBaseTool { try { addAuthInfoToConf(authConfig, conf, superUser, userNames); } catch (IOException exp) { - LOG.error(exp); + LOG.error(exp.toString(), exp); return EXIT_FAILURE; } userOwner = User.create(HBaseKerberosUtils.loginAndReturnUGI(conf, superUser)); @@ -609,7 +610,8 @@ public class LoadTestTool extends AbstractHBaseTool { AccessControlClient.grant(ConnectionFactory.createConnection(conf), tableName, userOwner.getShortName(), null, null, actions); } catch (Throwable e) { - LOG.fatal("Error in granting permission for the user " + userOwner.getShortName(), e); + LOG.error(HBaseMarkers.FATAL, "Error in granting permission for the user " + + userOwner.getShortName(), e); return EXIT_FAILURE; } } diff --git a/hbase-metrics-api/pom.xml b/hbase-metrics-api/pom.xml index eebcedb9e9b..4d298337bc0 100644 --- a/hbase-metrics-api/pom.xml +++ b/hbase-metrics-api/pom.xml @@ -85,8 +85,8 @@ - commons-logging - commons-logging + org.slf4j + slf4j-api org.apache.commons diff --git 
a/hbase-metrics-api/src/main/java/org/apache/hadoop/hbase/metrics/MetricRegistriesLoader.java b/hbase-metrics-api/src/main/java/org/apache/hadoop/hbase/metrics/MetricRegistriesLoader.java index 0c29e22e37b..d398c257074 100644 --- a/hbase-metrics-api/src/main/java/org/apache/hadoop/hbase/metrics/MetricRegistriesLoader.java +++ b/hbase-metrics-api/src/main/java/org/apache/hadoop/hbase/metrics/MetricRegistriesLoader.java @@ -25,16 +25,16 @@ import java.util.ArrayList; import java.util.List; import java.util.ServiceLoader; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.util.ReflectionUtils; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; @InterfaceAudience.Private public class MetricRegistriesLoader { - private static final Log LOG = LogFactory.getLog(MetricRegistries.class); + private static final Logger LOG = LoggerFactory.getLogger(MetricRegistries.class); private static final String defaultClass = "org.apache.hadoop.hbase.metrics.impl.MetricRegistriesImpl"; diff --git a/hbase-procedure/pom.xml b/hbase-procedure/pom.xml index 0f0f00d1c8e..32b986cd36b 100644 --- a/hbase-procedure/pom.xml +++ b/hbase-procedure/pom.xml @@ -86,8 +86,8 @@ hbase-shaded-miscellaneous - commons-logging - commons-logging + org.slf4j + slf4j-api commons-cli diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/AbstractProcedureScheduler.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/AbstractProcedureScheduler.java index 3e474513d83..fbfa5b2f10d 100644 --- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/AbstractProcedureScheduler.java +++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/AbstractProcedureScheduler.java @@ -23,13 +23,13 @@ import java.util.concurrent.locks.Condition; import java.util.concurrent.locks.ReentrantLock; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @InterfaceAudience.Private public abstract class AbstractProcedureScheduler implements ProcedureScheduler { - private static final Log LOG = LogFactory.getLog(AbstractProcedureScheduler.class); + private static final Logger LOG = LoggerFactory.getLogger(AbstractProcedureScheduler.class); private final ReentrantLock schedulerLock = new ReentrantLock(); private final Condition schedWaitCond = schedulerLock.newCondition(); private boolean running = false; diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java index cbbd0e73d50..64c0233e7ec 100644 --- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java +++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java @@ -23,10 +23,10 @@ import java.util.Arrays; import java.util.List; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceStability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.exceptions.TimeoutIOException; import 
org.apache.hadoop.hbase.metrics.Counter; import org.apache.hadoop.hbase.metrics.Histogram; @@ -88,7 +88,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe @InterfaceAudience.Private @InterfaceStability.Evolving public abstract class Procedure implements Comparable> { - private static final Log LOG = LogFactory.getLog(Procedure.class); + private static final Logger LOG = LoggerFactory.getLogger(Procedure.class); public static final long NO_PROC_ID = -1; protected static final int NO_TIMEOUT = -1; diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureEvent.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureEvent.java index 20803f453fe..fb3d7edaa24 100644 --- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureEvent.java +++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureEvent.java @@ -18,10 +18,9 @@ package org.apache.hadoop.hbase.procedure2; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** @@ -30,7 +29,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe */ @InterfaceAudience.Private public class ProcedureEvent { - private static final Log LOG = LogFactory.getLog(ProcedureEvent.class); + private static final Logger LOG = LoggerFactory.getLogger(ProcedureEvent.class); private final T object; private boolean ready = false; diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java index ac0487165e3..982525b75de 100644 --- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java +++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java @@ -40,13 +40,14 @@ import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.DelayQueue; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HConstants; import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceStability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.exceptions.IllegalArgumentIOException; +import org.apache.hadoop.hbase.log.HBaseMarkers; import org.apache.hadoop.hbase.procedure2.Procedure.LockState; import org.apache.hadoop.hbase.procedure2.store.ProcedureStore; import org.apache.hadoop.hbase.procedure2.store.ProcedureStore.ProcedureIterator; @@ -75,7 +76,7 @@ import org.apache.hadoop.hbase.util.Threads; @InterfaceAudience.Private @InterfaceStability.Evolving public class ProcedureExecutor { - private static final Log LOG = LogFactory.getLog(ProcedureExecutor.class); + private static final Logger LOG = LoggerFactory.getLogger(ProcedureExecutor.class); public static final String CHECK_OWNER_SET_CONF_KEY = "hbase.procedure.check.owner.set"; private static final boolean DEFAULT_CHECK_OWNER_SET = false; @@ -160,7 +161,7 @@ public class ProcedureExecutor { */ private static class CompletedProcedureCleaner extends ProcedureInMemoryChore { - private static final Log LOG = 
LogFactory.getLog(CompletedProcedureCleaner.class); + private static final Logger LOG = LoggerFactory.getLogger(CompletedProcedureCleaner.class); private static final String CLEANER_INTERVAL_CONF_KEY = "hbase.procedure.cleaner.interval"; private static final int DEFAULT_CLEANER_INTERVAL = 30 * 1000; // 30sec @@ -1364,7 +1365,7 @@ public class ProcedureExecutor { return LockState.LOCK_YIELD_WAIT; } catch (Throwable e) { // Catch NullPointerExceptions or similar errors... - LOG.fatal("CODE-BUG: Uncaught runtime exception for " + proc, e); + LOG.error(HBaseMarkers.FATAL, "CODE-BUG: Uncaught runtime exception for " + proc, e); } // allows to kill the executor before something is stored to the wal. diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RemoteProcedureDispatcher.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RemoteProcedureDispatcher.java index 41676dc00b7..3842231e47f 100644 --- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RemoteProcedureDispatcher.java +++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RemoteProcedureDispatcher.java @@ -32,10 +32,10 @@ import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.procedure2.util.DelayedUtil; import org.apache.hadoop.hbase.procedure2.util.DelayedUtil.DelayedContainerWithTimestamp; import org.apache.hadoop.hbase.procedure2.util.DelayedUtil.DelayedWithTimeout; @@ -57,7 +57,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultima */ @InterfaceAudience.Private public abstract class RemoteProcedureDispatcher> { - private static final Log LOG = LogFactory.getLog(RemoteProcedureDispatcher.class); + private static final Logger LOG = LoggerFactory.getLogger(RemoteProcedureDispatcher.class); public static final String THREAD_POOL_SIZE_CONF_KEY = "hbase.procedure.remote.dispatcher.threadpool.size"; diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RootProcedureState.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RootProcedureState.java index 1f928a42b57..46185eaae7b 100644 --- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RootProcedureState.java +++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RootProcedureState.java @@ -23,10 +23,10 @@ import java.util.HashSet; import java.util.List; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceStability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureState; /** @@ -43,7 +43,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedu @InterfaceAudience.Private @InterfaceStability.Evolving class RootProcedureState { - private static final Log LOG = LogFactory.getLog(RootProcedureState.class); + private static final Logger LOG = LoggerFactory.getLogger(RootProcedureState.class); private enum State { RUNNING, // The Procedure is running or ready to run diff --git 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/StateMachineProcedure.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/StateMachineProcedure.java index 893ee0cd57e..ade07cc825b 100644 --- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/StateMachineProcedure.java +++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/StateMachineProcedure.java @@ -24,10 +24,10 @@ import java.util.Arrays; import java.util.List; import java.util.concurrent.atomic.AtomicBoolean; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceStability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.StateMachineProcedureData; /** @@ -45,7 +45,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.StateMa @InterfaceStability.Evolving public abstract class StateMachineProcedure extends Procedure { - private static final Log LOG = LogFactory.getLog(StateMachineProcedure.class); + private static final Logger LOG = LoggerFactory.getLogger(StateMachineProcedure.class); private static final int EOF_STATE = Integer.MIN_VALUE; diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFile.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFile.java index edfb3adef28..1e9ef6e78dc 100644 --- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFile.java +++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFile.java @@ -20,14 +20,14 @@ package org.apache.hadoop.hbase.procedure2.store.wal; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceStability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.procedure2.store.ProcedureStoreTracker; import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos; import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALHeader; @@ -39,7 +39,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedu @InterfaceAudience.Private @InterfaceStability.Evolving public class ProcedureWALFile implements Comparable { - private static final Log LOG = LogFactory.getLog(ProcedureWALFile.class); + private static final Logger LOG = LoggerFactory.getLogger(ProcedureWALFile.class); private ProcedureWALHeader header; private FSDataInputStream stream; diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormat.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormat.java index a34afe5afe3..84edd0fbcfa 100644 --- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormat.java +++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormat.java @@ -25,12 +25,12 @@ import java.io.InputStream; import java.io.OutputStream; import java.util.Iterator; -import 
org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceStability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.io.util.StreamUtils; import org.apache.hadoop.hbase.procedure2.Procedure; import org.apache.hadoop.hbase.procedure2.ProcedureUtil; @@ -47,7 +47,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedu @InterfaceAudience.Private @InterfaceStability.Evolving public final class ProcedureWALFormat { - private static final Log LOG = LogFactory.getLog(ProcedureWALFormat.class); + private static final Logger LOG = LoggerFactory.getLogger(ProcedureWALFormat.class); static final byte LOG_TYPE_STREAM = 0; static final byte LOG_TYPE_COMPACTED = 1; diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.java index 36d82701418..0e110884d80 100644 --- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.java +++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.java @@ -22,11 +22,11 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferE import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceStability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.procedure2.Procedure; import org.apache.hadoop.hbase.procedure2.ProcedureUtil; import org.apache.hadoop.hbase.procedure2.store.ProcedureStore.ProcedureIterator; @@ -40,7 +40,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedu @InterfaceAudience.Private @InterfaceStability.Evolving public class ProcedureWALFormatReader { - private static final Log LOG = LogFactory.getLog(ProcedureWALFormatReader.class); + private static final Logger LOG = LoggerFactory.getLogger(ProcedureWALFormatReader.class); // ============================================================================================== // We read the WALs in reverse order from the newest to the oldest. 
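Beyond the import swap, the hunks around this point make three adjustments that the slf4j API forces. Log messages must be Strings, so non-String arguments are wrapped with Objects.toString(...) (the fstat.getPath() call in TestExportSnapshot above, and the state lists in TestProcedureExecution further down). A bare Throwable can no longer serve as the message, so LoadTestTool logs exp.toString() together with the exception. And slf4j has no FATAL level, so former LOG.fatal(...) calls become LOG.error(HBaseMarkers.FATAL, ...) using the marker imported from org.apache.hadoop.hbase.log, as in ProcedureExecutor above and the WALProcedureStore hunks that follow. A compact sketch of the three rewrites; the class and method names here are hypothetical:

    import java.util.Objects;
    import org.apache.hadoop.hbase.log.HBaseMarkers;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class LoggingRewriteExamples {
      private static final Logger LOG = LoggerFactory.getLogger(LoggingRewriteExamples.class);

      void demo(Object path, Exception exp) {
        // slf4j takes String messages, so stringify non-String objects explicitly.
        LOG.debug(Objects.toString(path));                        // was: LOG.debug(path);
        // slf4j has no error(Throwable) overload; supply a message plus the cause.
        LOG.error(exp.toString(), exp);                           // was: LOG.error(exp);
        // slf4j has no FATAL level; route it through error() with the FATAL marker.
        LOG.error(HBaseMarkers.FATAL, "Unable to roll the log");  // was: LOG.fatal("Unable to roll the log");
      }
    }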
diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureStore.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureStore.java index 84cda6526ff..da9544fb6a7 100644 --- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureStore.java +++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureStore.java @@ -37,8 +37,6 @@ import java.util.concurrent.locks.Condition; import java.util.concurrent.locks.ReentrantLock; import org.apache.commons.collections4.queue.CircularFifoQueue; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileAlreadyExistsException; @@ -47,6 +45,7 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.PathFilter; import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.log.HBaseMarkers; import org.apache.hadoop.hbase.procedure2.Procedure; import org.apache.hadoop.hbase.procedure2.store.ProcedureStoreBase; import org.apache.hadoop.hbase.procedure2.store.ProcedureStoreTracker; @@ -59,6 +58,8 @@ import org.apache.hadoop.hbase.util.Threads; import org.apache.hadoop.ipc.RemoteException; import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceStability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** @@ -67,7 +68,7 @@ import org.apache.yetus.audience.InterfaceStability; @InterfaceAudience.Private @InterfaceStability.Evolving public class WALProcedureStore extends ProcedureStoreBase { - private static final Log LOG = LogFactory.getLog(WALProcedureStore.class); + private static final Logger LOG = LoggerFactory.getLogger(WALProcedureStore.class); public static final String LOG_PREFIX = "pv2-"; /** Used to construct the name of the log directory for master procedures */ public static final String MASTER_PROCEDURE_LOGDIR = "MasterProcWALs"; @@ -496,8 +497,8 @@ public class WALProcedureStore extends ProcedureStoreBase { } catch (IOException e) { // We are not able to serialize the procedure. // this is a code error, and we are not able to go on. - LOG.fatal("Unable to serialize one of the procedure: proc=" + proc + - ", subprocs=" + Arrays.toString(subprocs), e); + LOG.error(HBaseMarkers.FATAL, "Unable to serialize one of the procedure: proc=" + + proc + ", subprocs=" + Arrays.toString(subprocs), e); throw new RuntimeException(e); } finally { releaseSlot(slot); @@ -525,7 +526,8 @@ public class WALProcedureStore extends ProcedureStoreBase { } catch (IOException e) { // We are not able to serialize the procedure. // this is a code error, and we are not able to go on. - LOG.fatal("Unable to serialize one of the procedure: " + Arrays.toString(procs), e); + LOG.error(HBaseMarkers.FATAL, "Unable to serialize one of the procedure: " + + Arrays.toString(procs), e); throw new RuntimeException(e); } finally { releaseSlot(slot); @@ -548,7 +550,7 @@ public class WALProcedureStore extends ProcedureStoreBase { } catch (IOException e) { // We are not able to serialize the procedure. // this is a code error, and we are not able to go on. 
- LOG.fatal("Unable to serialize the procedure: " + proc, e); + LOG.error(HBaseMarkers.FATAL, "Unable to serialize the procedure: " + proc, e); throw new RuntimeException(e); } finally { releaseSlot(slot); @@ -571,7 +573,7 @@ public class WALProcedureStore extends ProcedureStoreBase { } catch (IOException e) { // We are not able to serialize the procedure. // this is a code error, and we are not able to go on. - LOG.fatal("Unable to serialize the procedure: " + procId, e); + LOG.error(HBaseMarkers.FATAL, "Unable to serialize the procedure: " + procId, e); throw new RuntimeException(e); } finally { releaseSlot(slot); @@ -596,7 +598,7 @@ public class WALProcedureStore extends ProcedureStoreBase { } catch (IOException e) { // We are not able to serialize the procedure. // this is a code error, and we are not able to go on. - LOG.fatal("Unable to serialize the procedure: " + proc, e); + LOG.error(HBaseMarkers.FATAL, "Unable to serialize the procedure: " + proc, e); throw new RuntimeException(e); } finally { releaseSlot(slot); @@ -632,7 +634,7 @@ public class WALProcedureStore extends ProcedureStoreBase { } catch (IOException e) { // We are not able to serialize the procedure. // this is a code error, and we are not able to go on. - LOG.fatal("Unable to serialize the procedures: " + Arrays.toString(procIds), e); + LOG.error("Unable to serialize the procedures: " + Arrays.toString(procIds), e); throw new RuntimeException(e); } finally { releaseSlot(slot); @@ -902,7 +904,7 @@ public class WALProcedureStore extends ProcedureStoreBase { LOG.warn("Unable to roll the log, attempt=" + (i + 1), e); } } - LOG.fatal("Unable to roll the log"); + LOG.error(HBaseMarkers.FATAL, "Unable to roll the log"); return false; } diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/ProcedureTestingUtility.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/ProcedureTestingUtility.java index 6e0c02eb2aa..2558a31f08d 100644 --- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/ProcedureTestingUtility.java +++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/ProcedureTestingUtility.java @@ -27,8 +27,6 @@ import java.util.ArrayList; import java.util.Set; import java.util.concurrent.Callable; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -44,9 +42,11 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.BytesValue; import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureState; import org.apache.hadoop.hbase.util.NonceKey; import org.apache.hadoop.hbase.util.Threads; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class ProcedureTestingUtility { - private static final Log LOG = LogFactory.getLog(ProcedureTestingUtility.class); + private static final Logger LOG = LoggerFactory.getLogger(ProcedureTestingUtility.class); private ProcedureTestingUtility() { } diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestChildProcedures.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestChildProcedures.java index 4c1611a6c50..79ce73617ce 100644 --- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestChildProcedures.java +++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestChildProcedures.java @@ -20,8 +20,6 @@ package org.apache.hadoop.hbase.procedure2; 
import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HBaseCommonTestingUtility; @@ -32,13 +30,15 @@ import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; @Category({MasterTests.class, SmallTests.class}) public class TestChildProcedures { - private static final Log LOG = LogFactory.getLog(TestChildProcedures.class); + private static final Logger LOG = LoggerFactory.getLogger(TestChildProcedures.class); private static final int PROCEDURE_EXECUTOR_SLOTS = 1; diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureEvents.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureEvents.java index d2b2b7d54d5..9588d998daa 100644 --- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureEvents.java +++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureEvents.java @@ -21,8 +21,6 @@ package org.apache.hadoop.hbase.procedure2; import java.io.IOException; import java.util.concurrent.atomic.AtomicInteger; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HBaseCommonTestingUtility; @@ -37,12 +35,14 @@ import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.junit.Assert.assertEquals; @Category({MasterTests.class, SmallTests.class}) public class TestProcedureEvents { - private static final Log LOG = LogFactory.getLog(TestProcedureEvents.class); + private static final Logger LOG = LoggerFactory.getLogger(TestProcedureEvents.class); private TestProcEnv procEnv; private ProcedureStore procStore; diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureExecution.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureExecution.java index ed6d512df70..ae781cd9139 100644 --- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureExecution.java +++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureExecution.java @@ -21,9 +21,8 @@ package org.apache.hadoop.hbase.procedure2; import java.io.IOException; import java.util.ArrayList; import java.util.List; +import java.util.Objects; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HBaseCommonTestingUtility; @@ -36,13 +35,15 @@ import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; @Category({MasterTests.class, SmallTests.class}) public class TestProcedureExecution { - private static final Log LOG = LogFactory.getLog(TestProcedureExecution.class); + private static final Logger LOG = 
LoggerFactory.getLogger(TestProcedureExecution.class); private static final int PROCEDURE_EXECUTOR_SLOTS = 1; private static final Procedure NULL_PROC = null; @@ -136,7 +137,7 @@ public class TestProcedureExecution { // subProc1 has a "null" subprocedure which is catched as InvalidArgument // failed state with 2 execute and 2 rollback - LOG.info(state); + LOG.info(Objects.toString(state)); Procedure result = procExecutor.getResult(rootId); assertTrue(state.toString(), result.isFailed()); ProcedureTestingUtility.assertIsIllegalArgumentException(result); @@ -157,7 +158,7 @@ public class TestProcedureExecution { long rootId = ProcedureTestingUtility.submitAndWait(procExecutor, rootProc); // successful state, with 3 execute - LOG.info(state); + LOG.info(Objects.toString(state)); Procedure result = procExecutor.getResult(rootId); ProcedureTestingUtility.assertProcNotFailed(result); assertEquals(state.toString(), 3, state.size()); @@ -173,7 +174,7 @@ public class TestProcedureExecution { long rootId = ProcedureTestingUtility.submitAndWait(procExecutor, rootProc); // the 3rd proc fail, rollback after 2 successful execution - LOG.info(state); + LOG.info(Objects.toString(state)); Procedure result = procExecutor.getResult(rootId); assertTrue(state.toString(), result.isFailed()); LOG.info(result.getException().getMessage()); @@ -300,7 +301,7 @@ public class TestProcedureExecution { long startTime = EnvironmentEdgeManager.currentTime(); long rootId = ProcedureTestingUtility.submitAndWait(procExecutor, proc); long execTime = EnvironmentEdgeManager.currentTime() - startTime; - LOG.info(state); + LOG.info(Objects.toString(state)); assertTrue("we didn't wait enough execTime=" + execTime, execTime >= PROC_TIMEOUT_MSEC); Procedure result = procExecutor.getResult(rootId); assertTrue(state.toString(), result.isFailed()); @@ -316,7 +317,7 @@ public class TestProcedureExecution { Procedure proc = new TestWaitingProcedure("wproc", state, true); proc.setTimeout(2500); long rootId = ProcedureTestingUtility.submitAndWait(procExecutor, proc); - LOG.info(state); + LOG.info(Objects.toString(state)); Procedure result = procExecutor.getResult(rootId); assertTrue(state.toString(), result.isFailed()); ProcedureTestingUtility.assertIsTimeoutException(result); diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureExecutor.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureExecutor.java index 289987be8b4..29a0472a452 100644 --- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureExecutor.java +++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureExecutor.java @@ -21,8 +21,6 @@ package org.apache.hadoop.hbase.procedure2; import java.util.concurrent.Semaphore; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseCommonTestingUtility; import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility; @@ -36,12 +34,14 @@ import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.junit.Assert.assertEquals; @Category({MasterTests.class, SmallTests.class}) public class TestProcedureExecutor { - private static final Log LOG = LogFactory.getLog(TestProcedureExecutor.class); + private static final Logger LOG = 
LoggerFactory.getLogger(TestProcedureExecutor.class); private TestProcEnv procEnv; private NoopProcedureStore procStore; diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureInMemoryChore.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureInMemoryChore.java index 50ccfa60f35..6546ea3c602 100644 --- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureInMemoryChore.java +++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureInMemoryChore.java @@ -22,8 +22,6 @@ import java.io.IOException; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HBaseCommonTestingUtility; import org.apache.hadoop.hbase.procedure2.store.NoopProcedureStore; import org.apache.hadoop.hbase.testclassification.SmallTests; @@ -32,6 +30,8 @@ import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; @@ -39,7 +39,7 @@ import static org.junit.Assert.assertTrue; @Category({MasterTests.class, SmallTests.class}) public class TestProcedureInMemoryChore { - private static final Log LOG = LogFactory.getLog(TestProcedureInMemoryChore.class); + private static final Logger LOG = LoggerFactory.getLogger(TestProcedureInMemoryChore.class); private static final int PROCEDURE_EXECUTOR_SLOTS = 1; diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureMetrics.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureMetrics.java index 6246629ef90..0550a91abe1 100644 --- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureMetrics.java +++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureMetrics.java @@ -18,8 +18,6 @@ package org.apache.hadoop.hbase.procedure2; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HBaseCommonTestingUtility; @@ -30,6 +28,8 @@ import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.io.IOException; @@ -39,7 +39,7 @@ import static org.junit.Assert.assertTrue; @Category({MasterTests.class, SmallTests.class}) public class TestProcedureMetrics { - private static final Log LOG = LogFactory.getLog(TestProcedureMetrics.class); + private static final Logger LOG = LoggerFactory.getLogger(TestProcedureMetrics.class); private static final int PROCEDURE_EXECUTOR_SLOTS = 1; diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureNonce.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureNonce.java index 12a8012ef8a..bebfae001a3 100644 --- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureNonce.java +++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureNonce.java @@ -22,8 +22,6 @@ import java.io.IOException; import java.util.concurrent.atomic.AtomicReference; import java.util.concurrent.CountDownLatch; -import 
org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HBaseCommonTestingUtility; @@ -39,6 +37,8 @@ import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; @@ -46,7 +46,7 @@ import static org.junit.Assert.assertTrue; @Category({MasterTests.class, SmallTests.class}) public class TestProcedureNonce { - private static final Log LOG = LogFactory.getLog(TestProcedureNonce.class); + private static final Logger LOG = LoggerFactory.getLogger(TestProcedureNonce.class); private static final int PROCEDURE_EXECUTOR_SLOTS = 2; diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureRecovery.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureRecovery.java index 06f8833a583..8fe56fe00b3 100644 --- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureRecovery.java +++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureRecovery.java @@ -22,8 +22,6 @@ import java.io.IOException; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.CountDownLatch; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.Path; @@ -39,6 +37,8 @@ import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; @@ -46,7 +46,7 @@ import static org.junit.Assert.assertTrue; @Category({MasterTests.class, SmallTests.class}) public class TestProcedureRecovery { - private static final Log LOG = LogFactory.getLog(TestProcedureRecovery.class); + private static final Logger LOG = LoggerFactory.getLogger(TestProcedureRecovery.class); private static final int PROCEDURE_EXECUTOR_SLOTS = 1; diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureReplayOrder.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureReplayOrder.java index 12b21847db7..20d60ceb45c 100644 --- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureReplayOrder.java +++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureReplayOrder.java @@ -22,8 +22,6 @@ import java.io.IOException; import java.util.ArrayList; import java.util.concurrent.atomic.AtomicLong; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HBaseCommonTestingUtility; @@ -37,6 +35,8 @@ import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; @@ -44,7 +44,7 @@ import static org.junit.Assert.fail; @Category({MasterTests.class, LargeTests.class}) public class TestProcedureReplayOrder { - private static final Log LOG = 
LogFactory.getLog(TestProcedureReplayOrder.class); + private static final Logger LOG = LoggerFactory.getLogger(TestProcedureReplayOrder.class); private static final int NUM_THREADS = 16; diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureSchedulerConcurrency.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureSchedulerConcurrency.java index 1c8f1ebb66c..6116736d3cf 100644 --- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureSchedulerConcurrency.java +++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureSchedulerConcurrency.java @@ -22,8 +22,6 @@ import java.io.IOException; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.ConcurrentSkipListSet; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility.NoopProcedure; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.testclassification.MasterTests; @@ -33,6 +31,8 @@ import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; @@ -40,7 +40,7 @@ import static org.junit.Assert.assertTrue; @Category({MasterTests.class, MediumTests.class}) public class TestProcedureSchedulerConcurrency { - private static final Log LOG = LogFactory.getLog(TestProcedureEvents.class); + private static final Logger LOG = LoggerFactory.getLogger(TestProcedureEvents.class); private SimpleProcedureScheduler procSched; diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureSuspended.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureSuspended.java index 3803abae272..1a426505eab 100644 --- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureSuspended.java +++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureSuspended.java @@ -25,8 +25,6 @@ import java.util.ArrayList; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicLong; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HBaseCommonTestingUtility; import org.apache.hadoop.hbase.procedure2.store.NoopProcedureStore; import org.apache.hadoop.hbase.procedure2.store.ProcedureStore; @@ -37,10 +35,12 @@ import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({MasterTests.class, SmallTests.class}) public class TestProcedureSuspended { - private static final Log LOG = LogFactory.getLog(TestProcedureSuspended.class); + private static final Logger LOG = LoggerFactory.getLogger(TestProcedureSuspended.class); private static final int PROCEDURE_EXECUTOR_SLOTS = 1; private static final Procedure NULL_PROC = null; diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestStateMachineProcedure.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestStateMachineProcedure.java index cbe50f2c2d3..f304ba72300 100644 --- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestStateMachineProcedure.java +++ 
b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestStateMachineProcedure.java @@ -21,8 +21,6 @@ package org.apache.hadoop.hbase.procedure2; import java.io.IOException; import java.util.concurrent.atomic.AtomicInteger; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HBaseCommonTestingUtility; @@ -35,13 +33,15 @@ import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; @Category({MasterTests.class, SmallTests.class}) public class TestStateMachineProcedure { - private static final Log LOG = LogFactory.getLog(TestStateMachineProcedure.class); + private static final Logger LOG = LoggerFactory.getLogger(TestStateMachineProcedure.class); private static final Exception TEST_FAILURE_EXCEPTION = new Exception("test failure") { @Override diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestYieldProcedures.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestYieldProcedures.java index 017992cfea6..202353526aa 100644 --- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestYieldProcedures.java +++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestYieldProcedures.java @@ -24,8 +24,6 @@ import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HBaseCommonTestingUtility; @@ -37,13 +35,15 @@ import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; @Category({MasterTests.class, SmallTests.class}) public class TestYieldProcedures { - private static final Log LOG = LogFactory.getLog(TestYieldProcedures.class); + private static final Logger LOG = LoggerFactory.getLogger(TestYieldProcedures.class); private static final int PROCEDURE_EXECUTOR_SLOTS = 1; private static final Procedure NULL_PROC = null; diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/store/TestProcedureStoreTracker.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/store/TestProcedureStoreTracker.java index 550116e2e16..e4766f6f15a 100644 --- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/store/TestProcedureStoreTracker.java +++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/store/TestProcedureStoreTracker.java @@ -20,13 +20,13 @@ package org.apache.hadoop.hbase.procedure2.store; import java.util.Random; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.testclassification.MasterTests; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static 
org.apache.hadoop.hbase.procedure2.store.ProcedureStoreTracker.BitSetNode; import static org.junit.Assert.assertEquals; @@ -35,7 +35,7 @@ import static org.junit.Assert.assertTrue; @Category({MasterTests.class, SmallTests.class}) public class TestProcedureStoreTracker { - private static final Log LOG = LogFactory.getLog(TestProcedureStoreTracker.class); + private static final Logger LOG = LoggerFactory.getLogger(TestProcedureStoreTracker.class); @Test public void testSeqInsertAndDelete() { diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/store/wal/TestStressWALProcedureStore.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/store/wal/TestStressWALProcedureStore.java index 98ec1146e79..31c9cf3ee20 100644 --- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/store/wal/TestStressWALProcedureStore.java +++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/store/wal/TestStressWALProcedureStore.java @@ -22,8 +22,6 @@ import java.io.IOException; import java.util.Random; import java.util.concurrent.atomic.AtomicLong; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -40,6 +38,8 @@ import org.junit.Before; import org.junit.Test; import org.junit.Ignore; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; @@ -48,7 +48,7 @@ import static org.junit.Assert.fail; @Category({MasterTests.class, LargeTests.class}) public class TestStressWALProcedureStore { - private static final Log LOG = LogFactory.getLog(TestWALProcedureStore.class); + private static final Logger LOG = LoggerFactory.getLogger(TestWALProcedureStore.class); private static final int PROCEDURE_STORE_SLOTS = 8; diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/store/wal/TestWALProcedureStore.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/store/wal/TestWALProcedureStore.java index 98b1b7c9d6a..a7bab8f625a 100644 --- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/store/wal/TestWALProcedureStore.java +++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/store/wal/TestWALProcedureStore.java @@ -28,8 +28,6 @@ import java.util.Comparator; import java.util.HashSet; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileStatus; @@ -53,6 +51,8 @@ import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; @@ -61,7 +61,7 @@ import static org.junit.Assert.fail; @Category({MasterTests.class, SmallTests.class}) public class TestWALProcedureStore { - private static final Log LOG = LogFactory.getLog(TestWALProcedureStore.class); + private static final Logger LOG = LoggerFactory.getLogger(TestWALProcedureStore.class); private static final int PROCEDURE_STORE_SLOTS = 1; private static final Procedure NULL_PROC = null; diff --git 
a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/util/TestDelayedUtil.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/util/TestDelayedUtil.java index 019b4567fde..dcb133e9ef0 100644 --- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/util/TestDelayedUtil.java +++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/util/TestDelayedUtil.java @@ -18,18 +18,18 @@ package org.apache.hadoop.hbase.procedure2.util; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.testclassification.MasterTests; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.junit.Assert.assertEquals; @Category({MasterTests.class, SmallTests.class}) public class TestDelayedUtil { - private static final Log LOG = LogFactory.getLog(TestDelayedUtil.class); + private static final Logger LOG = LoggerFactory.getLogger(TestDelayedUtil.class); @Test public void testDelayedContainerEquals() { diff --git a/hbase-protocol-shaded/pom.xml b/hbase-protocol-shaded/pom.xml index ca602f0c3ac..506255d2318 100644 --- a/hbase-protocol-shaded/pom.xml +++ b/hbase-protocol-shaded/pom.xml @@ -160,6 +160,7 @@ junit:junit log4j:log4j commons-logging:commons-logging + org.slf4j:slf4j-api org.apache.yetus:audience-annotations com.github.stephenc.fingbugs:* diff --git a/hbase-protocol/pom.xml b/hbase-protocol/pom.xml index e8479cf15d7..6a2c14605b0 100644 --- a/hbase-protocol/pom.xml +++ b/hbase-protocol/pom.xml @@ -120,8 +120,8 @@ protobuf-java - commons-logging - commons-logging + org.slf4j + slf4j-api diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/util/ByteStringer.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/util/ByteStringer.java index e8491f7e3d4..65f1cc67214 100644 --- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/util/ByteStringer.java +++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/util/ByteStringer.java @@ -17,9 +17,9 @@ */ package org.apache.hadoop.hbase.util; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import com.google.protobuf.ByteString; import com.google.protobuf.HBaseZeroCopyByteString; @@ -29,7 +29,7 @@ import com.google.protobuf.HBaseZeroCopyByteString; */ @InterfaceAudience.Private public class ByteStringer { - private static final Log LOG = LogFactory.getLog(ByteStringer.class); + private static final Logger LOG = LoggerFactory.getLogger(ByteStringer.class); /** * Flag set at class loading time. diff --git a/hbase-replication/pom.xml b/hbase-replication/pom.xml index 8c6b4285c23..994360cc47b 100644 --- a/hbase-replication/pom.xml +++ b/hbase-replication/pom.xml @@ -127,8 +127,8 @@ commons-lang3
- commons-logging - commons-logging + org.slf4j + slf4j-api org.apache.zookeeper diff --git a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerZKImpl.java b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerZKImpl.java index 214a3136313..454d09ce352 100644 --- a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerZKImpl.java +++ b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerZKImpl.java @@ -25,13 +25,12 @@ import java.util.List; import java.util.Map; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Abortable; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.replication.ReplicationPeerConfigUtil; import org.apache.hadoop.hbase.exceptions.DeserializationException; +import org.apache.hadoop.hbase.log.HBaseMarkers; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos; import org.apache.hadoop.hbase.zookeeper.ZKNodeTracker; @@ -40,11 +39,13 @@ import org.apache.hadoop.hbase.zookeeper.ZKWatcher; import org.apache.yetus.audience.InterfaceAudience; import org.apache.zookeeper.KeeperException; import org.apache.zookeeper.KeeperException.NodeExistsException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @InterfaceAudience.Private public class ReplicationPeerZKImpl extends ReplicationStateZKBase implements ReplicationPeer, Abortable, Closeable { - private static final Log LOG = LogFactory.getLog(ReplicationPeerZKImpl.class); + private static final Logger LOG = LoggerFactory.getLogger(ReplicationPeerZKImpl.class); private ReplicationPeerConfig peerConfig; private final String id; @@ -187,8 +188,8 @@ public class ReplicationPeerZKImpl extends ReplicationStateZKBase @Override public void abort(String why, Throwable e) { - LOG.fatal("The ReplicationPeer corresponding to peer " + peerConfig - + " was aborted for the following reason(s):" + why, e); + LOG.error(HBaseMarkers.FATAL, "The ReplicationPeer corresponding to peer " + + peerConfig + " was aborted for the following reason(s):" + why, e); } @Override diff --git a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeersZKImpl.java b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeersZKImpl.java index ca99f65a295..8f5e8d57e91 100644 --- a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeersZKImpl.java +++ b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeersZKImpl.java @@ -28,8 +28,6 @@ import java.util.TreeMap; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Abortable; import org.apache.hadoop.hbase.CompoundConfiguration; @@ -47,6 +45,8 @@ import org.apache.hadoop.hbase.zookeeper.ZKWatcher; import org.apache.hadoop.hbase.zookeeper.ZNodePaths; import org.apache.yetus.audience.InterfaceAudience; import org.apache.zookeeper.KeeperException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This class provides an implementation of the ReplicationPeers interface using ZooKeeper. 
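The hunks above apply the same mechanical substitution to each file: the commons-logging Log/LogFactory pair is replaced by SLF4J's Logger/LoggerFactory, and each pom swaps its commons-logging entry for slf4j-api. As a minimal sketch of the target idiom only (ExampleDao and its method are invented names, not part of the patch):

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// Illustrative class: shows the SLF4J declaration the conversion standardizes on
// in place of commons-logging's LogFactory.getLog(...).
public class ExampleDao {
  // before: private static final Log LOG = LogFactory.getLog(ExampleDao.class);
  private static final Logger LOG = LoggerFactory.getLogger(ExampleDao.class);

  public void store(String rowKey, int attempt) {
    // SLF4J {} placeholders defer string building until the level is enabled,
    // so cheap arguments need no isDebugEnabled() guard.
    LOG.debug("storing row {} (attempt {})", rowKey, attempt);
  }
}

The patch itself keeps the existing concatenated log messages untouched; the placeholder form is simply what the new API makes available.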
The @@ -82,7 +82,7 @@ public class ReplicationPeersZKImpl extends ReplicationStateZKBase implements Re private final ReplicationQueuesClient queuesClient; private Abortable abortable; - private static final Log LOG = LogFactory.getLog(ReplicationPeersZKImpl.class); + private static final Logger LOG = LoggerFactory.getLogger(ReplicationPeersZKImpl.class); public ReplicationPeersZKImpl(final ZKWatcher zk, final Configuration conf, final ReplicationQueuesClient queuesClient, Abortable abortable) { diff --git a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueueInfo.java b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueueInfo.java index 68b7ebeec96..ecd888f51ea 100644 --- a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueueInfo.java +++ b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueueInfo.java @@ -23,10 +23,9 @@ import java.util.ArrayList; import java.util.Collections; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; - import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.ServerName; /** @@ -36,7 +35,7 @@ import org.apache.hadoop.hbase.ServerName; */ @InterfaceAudience.Private public class ReplicationQueueInfo { - private static final Log LOG = LogFactory.getLog(ReplicationQueueInfo.class); + private static final Logger LOG = LoggerFactory.getLogger(ReplicationQueueInfo.class); private final String peerId; private final String peerClusterZnode; diff --git a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesClientZKImpl.java b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesClientZKImpl.java index b998f159752..e85b42ae0aa 100644 --- a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesClientZKImpl.java +++ b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesClientZKImpl.java @@ -21,8 +21,6 @@ package org.apache.hadoop.hbase.replication; import java.util.List; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Abortable; import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableSet; @@ -33,12 +31,14 @@ import org.apache.hadoop.hbase.zookeeper.ZNodePaths; import org.apache.yetus.audience.InterfaceAudience; import org.apache.zookeeper.KeeperException; import org.apache.zookeeper.data.Stat; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @InterfaceAudience.Private public class ReplicationQueuesClientZKImpl extends ReplicationStateZKBase implements ReplicationQueuesClient { - Log LOG = LogFactory.getLog(ReplicationQueuesClientZKImpl.class); + Logger LOG = LoggerFactory.getLogger(ReplicationQueuesClientZKImpl.class); public ReplicationQueuesClientZKImpl(ReplicationQueuesClientArguments args) { this(args.getZk(), args.getConf(), args.getAbortable()); diff --git a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesZKImpl.java b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesZKImpl.java index 95fd29430c6..7551cb76608 100644 --- a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesZKImpl.java +++ 
b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesZKImpl.java @@ -23,8 +23,6 @@ import java.util.List; import java.util.SortedSet; import java.util.TreeSet; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Abortable; @@ -38,6 +36,8 @@ import org.apache.hadoop.hbase.zookeeper.ZKWatcher; import org.apache.hadoop.hbase.zookeeper.ZNodePaths; import org.apache.yetus.audience.InterfaceAudience; import org.apache.zookeeper.KeeperException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This class provides an implementation of the @@ -67,7 +67,7 @@ public class ReplicationQueuesZKImpl extends ReplicationStateZKBase implements R /** Znode containing all replication queues for this region server. */ private String myQueuesZnode; - private static final Log LOG = LogFactory.getLog(ReplicationQueuesZKImpl.class); + private static final Logger LOG = LoggerFactory.getLogger(ReplicationQueuesZKImpl.class); public ReplicationQueuesZKImpl(ReplicationQueuesArguments args) { this(args.getZk(), args.getConf(), args.getAbortable()); diff --git a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationTrackerZKImpl.java b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationTrackerZKImpl.java index 300a93b6025..9a1d9aaefb9 100644 --- a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationTrackerZKImpl.java +++ b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationTrackerZKImpl.java @@ -22,8 +22,6 @@ import java.util.ArrayList; import java.util.List; import java.util.concurrent.CopyOnWriteArrayList; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.zookeeper.ZKListener; import org.apache.hadoop.hbase.zookeeper.ZKWatcher; import org.apache.yetus.audience.InterfaceAudience; @@ -32,6 +30,8 @@ import org.apache.hadoop.hbase.Abortable; import org.apache.hadoop.hbase.Stoppable; import org.apache.hadoop.hbase.zookeeper.ZKUtil; import org.apache.zookeeper.KeeperException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This class is a ZooKeeper implementation of the ReplicationTracker interface. 
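SLF4J's Logger interface has no fatal() method, which is why the abort() hunk in ReplicationPeerZKImpl above (and RESTServer later in this patch) rewrites LOG.fatal(...) as LOG.error(HBaseMarkers.FATAL, ...). A hedged sketch of the same idea, using a locally created marker so the example stands alone (only the HBaseMarkers.FATAL usage is taken from the patch; the surrounding class is invented):

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.Marker;
import org.slf4j.MarkerFactory;

public class FatalLoggingSketch {
  private static final Logger LOG = LoggerFactory.getLogger(FatalLoggingSketch.class);
  // Stand-in for org.apache.hadoop.hbase.log.HBaseMarkers.FATAL.
  private static final Marker FATAL = MarkerFactory.getMarker("FATAL");

  public void abort(String why, Throwable e) {
    // Logged at ERROR level; the marker lets the logging backend filter or
    // highlight messages that were fatal() under commons-logging.
    LOG.error(FATAL, "Aborting for reason: " + why, e);
  }
}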
This class is @@ -41,7 +41,7 @@ import org.apache.zookeeper.KeeperException; @InterfaceAudience.Private public class ReplicationTrackerZKImpl extends ReplicationStateZKBase implements ReplicationTracker { - private static final Log LOG = LogFactory.getLog(ReplicationTrackerZKImpl.class); + private static final Logger LOG = LoggerFactory.getLogger(ReplicationTrackerZKImpl.class); // All about stopping private final Stoppable stopper; // listeners to be notified diff --git a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/TableBasedReplicationQueuesImpl.java b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/TableBasedReplicationQueuesImpl.java index 546464344d4..b6c849c7351 100644 --- a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/TableBasedReplicationQueuesImpl.java +++ b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/TableBasedReplicationQueuesImpl.java @@ -19,8 +19,6 @@ package org.apache.hadoop.hbase.replication; import org.apache.commons.lang3.NotImplementedException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; @@ -42,6 +40,8 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.hbase.zookeeper.ZKWatcher; import org.apache.zookeeper.KeeperException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.io.IOException; import java.util.ArrayList; @@ -59,7 +59,7 @@ import java.util.TreeSet; public class TableBasedReplicationQueuesImpl extends ReplicationTableBase implements ReplicationQueues { - private static final Log LOG = LogFactory.getLog(TableBasedReplicationQueuesImpl.class); + private static final Logger LOG = LoggerFactory.getLogger(TableBasedReplicationQueuesImpl.class); // Common byte values used in replication offset tracking private static final byte[] INITIAL_OFFSET_BYTES = Bytes.toBytes(0L); diff --git a/hbase-rest/pom.xml b/hbase-rest/pom.xml index 58e988babbf..76b1e1fccff 100644 --- a/hbase-rest/pom.xml +++ b/hbase-rest/pom.xml @@ -269,8 +269,8 @@ commons-lang3 - commons-logging - commons-logging + org.slf4j + slf4j-api javax.xml.bind diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/MultiRowResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/MultiRowResource.java index 921b17c97c8..9f353aab6c7 100644 --- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/MultiRowResource.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/MultiRowResource.java @@ -27,9 +27,9 @@ import javax.ws.rs.core.MultivaluedMap; import javax.ws.rs.core.Response; import javax.ws.rs.core.UriInfo; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.rest.model.CellModel; @@ -38,7 +38,7 @@ import org.apache.hadoop.hbase.rest.model.RowModel; @InterfaceAudience.Private public class MultiRowResource extends ResourceBase implements Constants { - private static final Log LOG = LogFactory.getLog(MultiRowResource.class); + private static final Logger LOG = LoggerFactory.getLogger(MultiRowResource.class); TableResource tableResource; Integer versions = null; diff --git 
a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesInstanceResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesInstanceResource.java index 4faf1d18eb9..3ff25f99ef7 100644 --- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesInstanceResource.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesInstanceResource.java @@ -34,11 +34,11 @@ import javax.ws.rs.core.HttpHeaders; import javax.ws.rs.core.Response; import javax.ws.rs.core.UriInfo; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.NamespaceDescriptor; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.rest.model.NamespacesInstanceModel; import org.apache.hadoop.hbase.rest.model.TableListModel; @@ -57,7 +57,7 @@ import org.apache.hadoop.hbase.rest.model.TableModel; @InterfaceAudience.Private public class NamespacesInstanceResource extends ResourceBase { - private static final Log LOG = LogFactory.getLog(NamespacesInstanceResource.class); + private static final Logger LOG = LoggerFactory.getLogger(NamespacesInstanceResource.class); String namespace; boolean queryTables = false; diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesResource.java index 4c5390aa6f9..fe48bafd473 100644 --- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesResource.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/NamespacesResource.java @@ -30,9 +30,9 @@ import javax.ws.rs.core.Context; import javax.ws.rs.core.Response; import javax.ws.rs.core.UriInfo; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.rest.model.NamespacesModel; /** @@ -44,7 +44,7 @@ import org.apache.hadoop.hbase.rest.model.NamespacesModel; @InterfaceAudience.Private public class NamespacesResource extends ResourceBase { - private static final Log LOG = LogFactory.getLog(NamespacesResource.class); + private static final Logger LOG = LoggerFactory.getLogger(NamespacesResource.class); /** * Constructor diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ProtobufStreamingOutput.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ProtobufStreamingOutput.java index b06704496d1..5ea8a316d39 100644 --- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ProtobufStreamingOutput.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ProtobufStreamingOutput.java @@ -24,8 +24,6 @@ import java.util.List; import javax.ws.rs.WebApplicationException; import javax.ws.rs.core.StreamingOutput; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.client.Result; @@ -34,9 +32,11 @@ import org.apache.hadoop.hbase.rest.model.CellModel; import org.apache.hadoop.hbase.rest.model.CellSetModel; import org.apache.hadoop.hbase.rest.model.RowModel; import org.apache.hadoop.hbase.util.Bytes; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class ProtobufStreamingOutput implements 
StreamingOutput { - private static final Log LOG = LogFactory.getLog(ProtobufStreamingOutput.class); + private static final Logger LOG = LoggerFactory.getLogger(ProtobufStreamingOutput.class); private String contentType; private ResultScanner resultScanner; diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java index 25aecdbd022..b30dc2a6a08 100644 --- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java @@ -32,13 +32,12 @@ import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; import org.apache.commons.cli.PosixParser; import org.apache.commons.lang3.ArrayUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.http.InfoServer; +import org.apache.hadoop.hbase.log.HBaseMarkers; import org.apache.hadoop.hbase.rest.filter.AuthFilter; import org.apache.hadoop.hbase.rest.filter.GzipFilter; import org.apache.hadoop.hbase.rest.filter.RestCsrfPreventionFilter; @@ -68,6 +67,8 @@ import org.eclipse.jetty.servlet.FilterHolder; import org.glassfish.jersey.server.ResourceConfig; import org.glassfish.jersey.servlet.ServletContainer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import javax.servlet.DispatcherType; @@ -82,7 +83,7 @@ import javax.servlet.DispatcherType; */ @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS) public class RESTServer implements Constants { - static Log LOG = LogFactory.getLog("RESTServer"); + static Logger LOG = LoggerFactory.getLogger("RESTServer"); static String REST_CSRF_ENABLED_KEY = "hbase.rest.csrf.enabled"; static boolean REST_CSRF_ENABLED_DEFAULT = false; @@ -358,7 +359,7 @@ public class RESTServer implements Constants { server.start(); server.join(); } catch (Exception e) { - LOG.fatal("Failed to start server", e); + LOG.error(HBaseMarkers.FATAL, "Failed to start server", e); System.exit(1); } LOG.info("***** STOPPING service '" + RESTServer.class.getSimpleName() + "' *****"); diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServlet.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServlet.java index 1e5d4a98c5c..b2fa16dde26 100644 --- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServlet.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServlet.java @@ -20,10 +20,10 @@ package org.apache.hadoop.hbase.rest; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.filter.ParseFilter; @@ -38,7 +38,7 @@ import org.apache.hadoop.security.authorize.ProxyUsers; */ @InterfaceAudience.Private public class RESTServlet implements Constants { - private static final Log LOG = LogFactory.getLog(RESTServlet.class); + private static final Logger LOG = LoggerFactory.getLogger(RESTServlet.class); private static RESTServlet INSTANCE; private final Configuration conf; private 
final MetricsREST metrics; diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RegionsResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RegionsResource.java index 183262d2d22..1e0f7beb9a0 100644 --- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RegionsResource.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RegionsResource.java @@ -29,8 +29,6 @@ import javax.ws.rs.core.UriInfo; import java.io.IOException; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.MetaTableAccessor; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.TableName; @@ -41,10 +39,12 @@ import org.apache.hadoop.hbase.client.RegionInfo; import org.apache.hadoop.hbase.rest.model.TableInfoModel; import org.apache.hadoop.hbase.rest.model.TableRegionModel; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @InterfaceAudience.Private public class RegionsResource extends ResourceBase { - private static final Log LOG = LogFactory.getLog(RegionsResource.class); + private static final Logger LOG = LoggerFactory.getLogger(RegionsResource.class); static CacheControl cacheControl; static { diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RootResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RootResource.java index d2ddb0d76cd..98217451c5b 100644 --- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RootResource.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RootResource.java @@ -31,10 +31,9 @@ import javax.ws.rs.core.Response; import javax.ws.rs.core.UriInfo; import javax.ws.rs.core.Response.ResponseBuilder; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; - import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.rest.model.TableListModel; import org.apache.hadoop.hbase.rest.model.TableModel; @@ -42,7 +41,7 @@ import org.apache.hadoop.hbase.rest.model.TableModel; @Path("/") @InterfaceAudience.Private public class RootResource extends ResourceBase { - private static final Log LOG = LogFactory.getLog(RootResource.class); + private static final Logger LOG = LoggerFactory.getLogger(RootResource.class); static CacheControl cacheControl; static { diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java index 8c1cb5b2cfe..b440fdf3a47 100644 --- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java @@ -37,8 +37,6 @@ import javax.ws.rs.core.Response.ResponseBuilder; import javax.ws.rs.core.UriInfo; import org.apache.commons.lang3.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.Cell.DataType; import org.apache.hadoop.hbase.CellBuilderFactory; @@ -56,10 +54,12 @@ import org.apache.hadoop.hbase.rest.model.CellSetModel; import org.apache.hadoop.hbase.rest.model.RowModel; import org.apache.hadoop.hbase.util.Bytes; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @InterfaceAudience.Private public class RowResource extends 
ResourceBase { - private static final Log LOG = LogFactory.getLog(RowResource.class); + private static final Logger LOG = LoggerFactory.getLogger(RowResource.class); private static final String CHECK_PUT = "put"; private static final String CHECK_DELETE = "delete"; diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResultGenerator.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResultGenerator.java index 1edd73a063b..9571c82d1b9 100644 --- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResultGenerator.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResultGenerator.java @@ -23,10 +23,10 @@ import java.io.IOException; import java.util.Iterator; import java.util.NoSuchElementException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.CellUtil; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.client.Get; @@ -37,7 +37,7 @@ import org.apache.hadoop.util.StringUtils; @InterfaceAudience.Private public class RowResultGenerator extends ResultGenerator { - private static final Log LOG = LogFactory.getLog(RowResultGenerator.class); + private static final Logger LOG = LoggerFactory.getLogger(RowResultGenerator.class); private Iterator valuesI; private Cell cache; diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerInstanceResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerInstanceResource.java index 8f5611589b4..b3e3985423e 100644 --- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerInstanceResource.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerInstanceResource.java @@ -31,12 +31,12 @@ import javax.ws.rs.core.Response; import javax.ws.rs.core.Response.ResponseBuilder; import javax.ws.rs.core.UriInfo; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.TableNotFoundException; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.rest.model.CellModel; import org.apache.hadoop.hbase.rest.model.CellSetModel; import org.apache.hadoop.hbase.rest.model.RowModel; @@ -45,8 +45,8 @@ import org.apache.hadoop.hbase.util.Bytes; @InterfaceAudience.Private public class ScannerInstanceResource extends ResourceBase { - private static final Log LOG = - LogFactory.getLog(ScannerInstanceResource.class); + private static final Logger LOG = + LoggerFactory.getLogger(ScannerInstanceResource.class); static CacheControl cacheControl; static { diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java index 60b348ee7ff..d2b173fa0c2 100644 --- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java @@ -35,10 +35,9 @@ import javax.ws.rs.core.Response; import javax.ws.rs.core.UriBuilder; import javax.ws.rs.core.UriInfo; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; - import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import 
org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.TableNotFoundException; import org.apache.hadoop.hbase.filter.Filter; import org.apache.hadoop.hbase.rest.model.ScannerModel; @@ -46,7 +45,7 @@ import org.apache.hadoop.hbase.rest.model.ScannerModel; @InterfaceAudience.Private public class ScannerResource extends ResourceBase { - private static final Log LOG = LogFactory.getLog(ScannerResource.class); + private static final Logger LOG = LoggerFactory.getLogger(ScannerResource.class); static final Map scanners = Collections.synchronizedMap(new HashMap()); diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerResultGenerator.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerResultGenerator.java index ece4f1249ba..b622fede6bb 100644 --- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerResultGenerator.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ScannerResultGenerator.java @@ -22,14 +22,14 @@ package org.apache.hadoop.hbase.rest; import java.io.IOException; import java.util.Iterator; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.TableNotEnabledException; import org.apache.hadoop.hbase.TableNotFoundException; import org.apache.hadoop.hbase.UnknownScannerException; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; import org.apache.hadoop.hbase.client.Scan; @@ -42,8 +42,8 @@ import org.apache.hadoop.util.StringUtils; @InterfaceAudience.Private public class ScannerResultGenerator extends ResultGenerator { - private static final Log LOG = - LogFactory.getLog(ScannerResultGenerator.class); + private static final Logger LOG = + LoggerFactory.getLogger(ScannerResultGenerator.class); public static Filter buildFilterFromModel(final ScannerModel model) throws Exception { diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/SchemaResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/SchemaResource.java index 8ce59eb4831..e617cd4426f 100644 --- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/SchemaResource.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/SchemaResource.java @@ -35,8 +35,6 @@ import javax.ws.rs.core.Response.ResponseBuilder; import javax.ws.rs.core.UriInfo; import javax.xml.namespace.QName; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.TableExistsException; @@ -44,6 +42,8 @@ import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.TableNotEnabledException; import org.apache.hadoop.hbase.TableNotFoundException; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.rest.model.ColumnSchemaModel; @@ -51,7 +51,7 @@ import org.apache.hadoop.hbase.rest.model.TableSchemaModel; @InterfaceAudience.Private public class SchemaResource extends ResourceBase { - private static final Log LOG = LogFactory.getLog(SchemaResource.class); + private static final Logger LOG = LoggerFactory.getLogger(SchemaResource.class); 
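A few classes in this patch keep a string-based logger name rather than a class literal, e.g. RESTServer above becomes LoggerFactory.getLogger("RESTServer"). The two factory overloads differ only in the resulting logger name, which is what log4j/logback configuration matches against; a small illustrative sketch (class name invented):

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LoggerNamingSketch {
  public static void main(String[] args) {
    Logger byClass = LoggerFactory.getLogger(LoggerNamingSketch.class);
    Logger byName = LoggerFactory.getLogger("RESTServer");

    // Prints the fully qualified class name for the first logger and the
    // literal "RESTServer" for the second.
    System.out.println(byClass.getName());
    System.out.println(byName.getName());
  }
}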
static CacheControl cacheControl; static { diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterStatusResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterStatusResource.java index 460f86a68ae..90ebccb47e7 100644 --- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterStatusResource.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterStatusResource.java @@ -30,10 +30,9 @@ import javax.ws.rs.core.Response; import javax.ws.rs.core.Response.ResponseBuilder; import javax.ws.rs.core.UriInfo; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; - import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.ClusterStatus; import org.apache.hadoop.hbase.ServerLoad; import org.apache.hadoop.hbase.RegionLoad; @@ -43,8 +42,8 @@ import org.apache.hadoop.hbase.rest.model.StorageClusterStatusModel; @InterfaceAudience.Private public class StorageClusterStatusResource extends ResourceBase { - private static final Log LOG = - LogFactory.getLog(StorageClusterStatusResource.class); + private static final Logger LOG = + LoggerFactory.getLogger(StorageClusterStatusResource.class); static CacheControl cacheControl; static { diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterVersionResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterVersionResource.java index 42f531cc7c2..3d70410a402 100644 --- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterVersionResource.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/StorageClusterVersionResource.java @@ -30,17 +30,16 @@ import javax.ws.rs.core.Response; import javax.ws.rs.core.UriInfo; import javax.ws.rs.core.Response.ResponseBuilder; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; - import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.ClusterStatus.Option; import org.apache.hadoop.hbase.rest.model.StorageClusterVersionModel; @InterfaceAudience.Private public class StorageClusterVersionResource extends ResourceBase { - private static final Log LOG = - LogFactory.getLog(StorageClusterVersionResource.class); + private static final Logger LOG = + LoggerFactory.getLogger(StorageClusterVersionResource.class); static CacheControl cacheControl; static { diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java index b32db7f19cc..b52b91ba104 100644 --- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java @@ -21,21 +21,16 @@ package org.apache.hadoop.hbase.rest; import java.io.IOException; import java.util.List; - import javax.ws.rs.DefaultValue; import javax.ws.rs.Encoded; -import javax.ws.rs.HeaderParam; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.QueryParam; -import javax.ws.rs.core.Context; -import javax.ws.rs.core.UriInfo; - import org.apache.commons.lang3.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.TableName; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; 
import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.filter.Filter; @@ -48,7 +43,7 @@ import org.apache.hadoop.hbase.util.Bytes; public class TableResource extends ResourceBase { String table; - private static final Log LOG = LogFactory.getLog(TableResource.class); + private static final Logger LOG = LoggerFactory.getLogger(TableResource.class); /** * Constructor @@ -206,7 +201,7 @@ public class TableResource extends ResourceBase { } catch (IOException exp) { servlet.getMetrics().incrementFailedScanRequests(1); processException(exp); - LOG.warn(exp); + LOG.warn(exp.toString(), exp); return null; } } diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableScanResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableScanResource.java index f8b959331d3..05bb0d6a050 100644 --- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableScanResource.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableScanResource.java @@ -36,22 +36,22 @@ import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlRootElement; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; import org.apache.hadoop.hbase.rest.model.CellModel; import org.apache.hadoop.hbase.rest.model.RowModel; +import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonProperty; @InterfaceAudience.Private public class TableScanResource extends ResourceBase { - private static final Log LOG = LogFactory.getLog(TableScanResource.class); + private static final Logger LOG = LoggerFactory.getLogger(TableScanResource.class); TableResource tableResource; ResultScanner results; @@ -126,7 +126,7 @@ public class TableScanResource extends ResourceBase { } catch (Exception exp) { servlet.getMetrics().incrementFailedScanRequests(1); processException(exp); - LOG.warn(exp); + LOG.warn(exp.toString(), exp); return null; } } diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/VersionResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/VersionResource.java index 38478408935..c212334153f 100644 --- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/VersionResource.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/VersionResource.java @@ -31,10 +31,9 @@ import javax.ws.rs.core.Response; import javax.ws.rs.core.UriInfo; import javax.ws.rs.core.Response.ResponseBuilder; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; - import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.rest.model.VersionModel; /** @@ -47,7 +46,7 @@ import org.apache.hadoop.hbase.rest.model.VersionModel; @InterfaceAudience.Private public class VersionResource extends ResourceBase { - private static final Log LOG = LogFactory.getLog(VersionResource.class); + private static final Logger LOG = LoggerFactory.getLogger(VersionResource.class); static CacheControl cacheControl; static { diff --git 
a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Client.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Client.java index c756a795348..d8cf5f4a118 100644 --- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Client.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Client.java @@ -29,9 +29,9 @@ import java.util.Collections; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.http.Header; import org.apache.http.HttpResponse; import org.apache.http.client.HttpClient; @@ -55,7 +55,7 @@ import org.apache.http.util.EntityUtils; public class Client { public static final Header[] EMPTY_HEADER_ARRAY = new Header[0]; - private static final Log LOG = LogFactory.getLog(Client.class); + private static final Logger LOG = LoggerFactory.getLogger(Client.class); private HttpClient httpClient; private Cluster cluster; diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java index 71001b0fc02..bb48243ad7b 100644 --- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java @@ -23,8 +23,7 @@ import com.google.protobuf.Descriptors; import com.google.protobuf.Message; import com.google.protobuf.Service; import com.google.protobuf.ServiceException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; + import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; @@ -35,6 +34,8 @@ import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.TableName; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.client.Append; import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.Durability; @@ -84,7 +85,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; @InterfaceAudience.Public public class RemoteHTable implements Table { - private static final Log LOG = LogFactory.getLog(RemoteHTable.class); + private static final Logger LOG = LoggerFactory.getLogger(RemoteHTable.class); final Client client; final Configuration conf; diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Response.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Response.java index 8f68f664baf..adffc126b7b 100644 --- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Response.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Response.java @@ -22,9 +22,9 @@ package org.apache.hadoop.hbase.rest.client; import java.io.IOException; import java.io.InputStream; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.http.Header; import org.apache.http.HttpResponse; @@ -33,7 +33,7 @@ import org.apache.http.HttpResponse; */ @InterfaceAudience.Public public class Response { - private static final Log LOG = 
LogFactory.getLog(Response.class); + private static final Logger LOG = LoggerFactory.getLogger(Response.class); private int code; private Header[] headers; diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/AuthFilter.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/AuthFilter.java index f051bc8212d..5dfa58caec6 100644 --- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/AuthFilter.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/AuthFilter.java @@ -28,17 +28,17 @@ import java.util.Properties; import javax.servlet.FilterConfig; import javax.servlet.ServletException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.util.DNS; import org.apache.hadoop.hbase.util.Strings; import org.apache.hadoop.security.SecurityUtil; import org.apache.hadoop.security.authentication.server.AuthenticationFilter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class AuthFilter extends AuthenticationFilter { - private static final Log LOG = LogFactory.getLog(AuthFilter.class); + private static final Logger LOG = LoggerFactory.getLogger(AuthFilter.class); private static final String REST_PREFIX = "hbase.rest.authentication."; private static final int REST_PREFIX_LEN = REST_PREFIX.length(); diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/RestCsrfPreventionFilter.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/RestCsrfPreventionFilter.java index 76dc70e7379..31a437a1785 100644 --- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/RestCsrfPreventionFilter.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/RestCsrfPreventionFilter.java @@ -34,10 +34,10 @@ import javax.servlet.ServletResponse; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceStability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; /** @@ -50,8 +50,8 @@ import org.apache.hadoop.conf.Configuration; @InterfaceAudience.Public public class RestCsrfPreventionFilter implements Filter { - private static final Log LOG = - LogFactory.getLog(RestCsrfPreventionFilter.class); + private static final Logger LOG = + LoggerFactory.getLogger(RestCsrfPreventionFilter.class); public static final String HEADER_USER_AGENT = "User-Agent"; public static final String BROWSER_USER_AGENT_PARAM = diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/provider/consumer/ProtobufMessageBodyConsumer.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/provider/consumer/ProtobufMessageBodyConsumer.java index 4483bdbe50f..882bd983d04 100644 --- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/provider/consumer/ProtobufMessageBodyConsumer.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/provider/consumer/ProtobufMessageBodyConsumer.java @@ -32,9 +32,9 @@ import javax.ws.rs.core.MultivaluedMap; import javax.ws.rs.ext.MessageBodyReader; import javax.ws.rs.ext.Provider; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; 
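The TableResource and TableScanResource hunks above also change LOG.warn(exp) to LOG.warn(exp.toString(), exp). commons-logging's warn(Object) accepted any object, including a Throwable, as the message, while SLF4J's warn() takes a String, so the exception has to be passed explicitly as the trailing argument for its stack trace to be recorded. A sketch of the pattern under those assumptions (the catch block and class name are invented):

import java.io.IOException;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class WarnMigrationSketch {
  private static final Logger LOG = LoggerFactory.getLogger(WarnMigrationSketch.class);

  public void scan() {
    try {
      throw new IOException("simulated scan failure");
    } catch (IOException exp) {
      // before (commons-logging): LOG.warn(exp);
      // after (SLF4J): message first, throwable last so the stack trace is retained.
      LOG.warn(exp.toString(), exp);
    }
  }
}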
+import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.rest.Constants; import org.apache.hadoop.hbase.rest.ProtobufMessageHandler; @@ -47,8 +47,8 @@ import org.apache.hadoop.hbase.rest.ProtobufMessageHandler; @InterfaceAudience.Private public class ProtobufMessageBodyConsumer implements MessageBodyReader { - private static final Log LOG = - LogFactory.getLog(ProtobufMessageBodyConsumer.class); + private static final Logger LOG = + LoggerFactory.getLogger(ProtobufMessageBodyConsumer.class); @Override public boolean isReadable(Class type, Type genericType, diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/DummyFilter.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/DummyFilter.java index 7ad162431a5..5af8ee2bfaf 100644 --- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/DummyFilter.java +++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/DummyFilter.java @@ -28,11 +28,11 @@ import javax.servlet.ServletResponse; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class DummyFilter implements Filter { - private static final Log LOG = LogFactory.getLog(DummyFilter.class); + private static final Logger LOG = LoggerFactory.getLogger(DummyFilter.class); @Override public void destroy() { diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/HBaseRESTTestingUtility.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/HBaseRESTTestingUtility.java index 4cce21b3692..273010a334d 100644 --- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/HBaseRESTTestingUtility.java +++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/HBaseRESTTestingUtility.java @@ -20,8 +20,6 @@ package org.apache.hadoop.hbase.rest; import com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider; import org.apache.commons.lang3.ArrayUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.security.UserProvider; import org.apache.hadoop.hbase.http.HttpServerUtil; @@ -36,6 +34,8 @@ import org.eclipse.jetty.servlet.ServletHolder; import org.glassfish.jersey.server.ResourceConfig; import org.glassfish.jersey.servlet.ServletContainer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import javax.servlet.DispatcherType; import java.util.Arrays; @@ -43,7 +43,7 @@ import java.util.EnumSet; public class HBaseRESTTestingUtility { - private static final Log LOG = LogFactory.getLog(HBaseRESTTestingUtility.class); + private static final Logger LOG = LoggerFactory.getLogger(HBaseRESTTestingUtility.class); private int testServletPort; private Server server; diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/PerformanceEvaluation.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/PerformanceEvaluation.java index 476594e0807..21d25e289e5 100644 --- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/PerformanceEvaluation.java +++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/PerformanceEvaluation.java @@ -34,8 +34,6 @@ import java.util.TreeMap; import java.util.regex.Matcher; import java.util.regex.Pattern; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; import 
org.apache.hadoop.fs.FSDataInputStream; @@ -62,7 +60,6 @@ import org.apache.hadoop.hbase.client.ResultScanner; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.filter.BinaryComparator; -import org.apache.hadoop.hbase.filter.CompareFilter; import org.apache.hadoop.hbase.filter.Filter; import org.apache.hadoop.hbase.filter.PageFilter; import org.apache.hadoop.hbase.filter.SingleColumnValueFilter; @@ -94,6 +91,8 @@ import org.apache.hadoop.mapreduce.lib.reduce.LongSumReducer; import org.apache.hadoop.util.LineReader; import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Script used evaluating Stargate performance and scalability. Runs a SG @@ -112,7 +111,8 @@ import org.apache.hadoop.util.ToolRunner; * runs an individual client. Each client does about 1GB of data. */ public class PerformanceEvaluation extends Configured implements Tool { - protected static final Log LOG = LogFactory.getLog(PerformanceEvaluation.class.getName()); + protected static final Logger LOG = + LoggerFactory.getLogger(PerformanceEvaluation.class); private static final int DEFAULT_ROW_PREFIX_LENGTH = 16; private static final int ROW_LENGTH = 1000; @@ -418,7 +418,7 @@ public class PerformanceEvaluation extends Configured implements Tool { } key = NullWritable.get(); - value = (PeInputSplit)split; + value = split; readOver = true; return true; @@ -490,10 +490,12 @@ public class PerformanceEvaluation extends Configured implements Tool { return clazz; } + @Override protected void map(NullWritable key, PeInputSplit value, final Context context) throws IOException, InterruptedException { Status status = new Status() { + @Override public void setStatus(String msg) { context.setStatus(msg); } @@ -635,6 +637,7 @@ public class PerformanceEvaluation extends Configured implements Tool { long elapsedTime = pe.runOneClient(cmd, index * perClientRows, perClientRows, R, flushCommits, writeToWAL, useTags, noOfTags, connection, new Status() { + @Override public void setStatus(final String msg) throws IOException { LOG.info("client-" + getName() + " " + msg); } @@ -956,6 +959,7 @@ public class PerformanceEvaluation extends Configured implements Tool { super(conf, options, status); } + @Override void testSetup() throws IOException { this.table = connection.getTable(tableName); } @@ -975,10 +979,12 @@ public class PerformanceEvaluation extends Configured implements Tool { this.flushCommits = options.isFlushCommits(); } + @Override void testSetup() throws IOException { this.mutator = connection.getBufferedMutator(tableName); } + @Override void testTakedown() throws IOException { if (flushCommits) { this.mutator.flush(); @@ -1214,7 +1220,7 @@ public class PerformanceEvaluation extends Configured implements Tool { } static class FilteredScanTest extends TableTest { - protected static final Log LOG = LogFactory.getLog(FilteredScanTest.class.getName()); + protected static final Logger LOG = LoggerFactory.getLogger(FilteredScanTest.class.getName()); FilteredScanTest(Configuration conf, TestOptions options, Status status) { super(conf, options, status); @@ -1327,6 +1333,7 @@ public class PerformanceEvaluation extends Configured implements Tool { private void runNIsOne(final Class cmd) { Status status = new Status() { + @Override public void setStatus(String msg) throws IOException { LOG.info(msg); } diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannerResource.java 
b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannerResource.java index 2b2e5e3700d..179befe007f 100644 --- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannerResource.java +++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannerResource.java @@ -37,14 +37,11 @@ import javax.xml.bind.JAXBException; import javax.xml.bind.Marshaller; import javax.xml.bind.Unmarshaller; -import org.apache.http.Header; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HTableDescriptor; -import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Connection; @@ -62,14 +59,17 @@ import org.apache.hadoop.hbase.rest.model.ScannerModel; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.testclassification.RestTests; import org.apache.hadoop.hbase.util.Bytes; +import org.apache.http.Header; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({RestTests.class, MediumTests.class}) public class TestScannerResource { - private static final Log LOG = LogFactory.getLog(TestScannerResource.class); + private static final Logger LOG = LoggerFactory.getLogger(TestScannerResource.class); private static final TableName TABLE = TableName.valueOf("TestScannerResource"); private static final TableName TABLE_TO_BE_DISABLED = TableName.valueOf("ScannerResourceDisable"); private static final String NONEXISTENT_TABLE = "ThisTableDoesNotExist"; @@ -79,7 +79,7 @@ public class TestScannerResource { private static final String COLUMN_2 = CFB + ":2"; private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); - private static final HBaseRESTTestingUtility REST_TEST_UTIL = + private static final HBaseRESTTestingUtility REST_TEST_UTIL = new HBaseRESTTestingUtility(); private static Client client; private static JAXBContext context; diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithFilters.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithFilters.java index c8bbc24149e..614b1a10ef0 100644 --- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithFilters.java +++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithFilters.java @@ -34,8 +34,6 @@ import javax.xml.bind.JAXBContext; import javax.xml.bind.Marshaller; import javax.xml.bind.Unmarshaller; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.CompareOperator; import org.apache.hadoop.hbase.HBaseTestingUtility; @@ -79,11 +77,13 @@ import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({RestTests.class, MediumTests.class}) public class TestScannersWithFilters { - private static final Log LOG = LogFactory.getLog(TestScannersWithFilters.class); + private static final Logger LOG = LoggerFactory.getLogger(TestScannersWithFilters.class); 
private static final TableName TABLE = TableName.valueOf("TestScannersWithFilters"); diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestSchemaResource.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestSchemaResource.java index 4866d53c2f8..04b23fa59fc 100644 --- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestSchemaResource.java +++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestSchemaResource.java @@ -26,8 +26,6 @@ import java.util.Collection; import javax.xml.bind.JAXBContext; import javax.xml.bind.JAXBException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HBaseCommonTestingUtility; import org.apache.http.Header; import org.apache.http.message.BasicHeader; @@ -55,11 +53,13 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({RestTests.class, MediumTests.class}) @RunWith(Parameterized.class) public class TestSchemaResource { - private static final Log LOG = LogFactory.getLog(TestSchemaResource.class); + private static final Logger LOG = LoggerFactory.getLogger(TestSchemaResource.class); private static String TABLE1 = "TestSchemaResource1"; private static String TABLE2 = "TestSchemaResource2"; diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestStatusResource.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestStatusResource.java index ca3b82f4ef6..e86a4f8c0a2 100644 --- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestStatusResource.java +++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestStatusResource.java @@ -24,8 +24,6 @@ import java.io.IOException; import javax.xml.bind.JAXBContext; import javax.xml.bind.JAXBException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.TableName; @@ -44,10 +42,12 @@ import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({RestTests.class, MediumTests.class}) public class TestStatusResource { - private static final Log LOG = LogFactory.getLog(TestStatusResource.class); + private static final Logger LOG = LoggerFactory.getLogger(TestStatusResource.class); private static final byte[] META_REGION_NAME = Bytes.toBytes(TableName.META_TABLE_NAME + ",,1"); diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableResource.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableResource.java index 26891774b74..55913499ef5 100644 --- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableResource.java +++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableResource.java @@ -32,8 +32,6 @@ import java.util.List; import javax.xml.bind.JAXBContext; import javax.xml.bind.JAXBException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HRegionInfo; @@ -64,10 +62,12 @@ import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; import 
org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({RestTests.class, MediumTests.class}) public class TestTableResource { - private static final Log LOG = LogFactory.getLog(TestTableResource.class); + private static final Logger LOG = LoggerFactory.getLogger(TestTableResource.class); private static TableName TABLE = TableName.valueOf("TestTableResource"); private static String COLUMN_FAMILY = "test"; diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableScan.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableScan.java index 8380a0a4fb3..a10fef013cb 100644 --- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableScan.java +++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableScan.java @@ -43,8 +43,6 @@ import javax.xml.bind.annotation.XmlRootElement; import javax.xml.parsers.SAXParserFactory; import javax.xml.stream.XMLStreamException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HColumnDescriptor; @@ -67,6 +65,8 @@ import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.xml.sax.InputSource; import org.xml.sax.XMLReader; @@ -78,7 +78,7 @@ import com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider; @Category({RestTests.class, MediumTests.class}) public class TestTableScan { - private static final Log LOG = LogFactory.getLog(TestTableScan.class); + private static final Logger LOG = LoggerFactory.getLogger(TestTableScan.class); private static final TableName TABLE = TableName.valueOf("TestScanResource"); private static final String CFA = "a"; diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestVersionResource.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestVersionResource.java index 1f927f597e0..50cb0854b3e 100644 --- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestVersionResource.java +++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestVersionResource.java @@ -25,8 +25,6 @@ import javax.ws.rs.core.MediaType; import javax.xml.bind.JAXBContext; import javax.xml.bind.JAXBException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.rest.client.Client; import org.apache.hadoop.hbase.rest.client.Cluster; @@ -48,10 +46,12 @@ import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({RestTests.class, MediumTests.class}) public class TestVersionResource { - private static final Log LOG = LogFactory.getLog(TestVersionResource.class); + private static final Logger LOG = LoggerFactory.getLogger(TestVersionResource.class); private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private static final HBaseRESTTestingUtility REST_TEST_UTIL = diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/client/TestXmlParsing.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/client/TestXmlParsing.java index 586e33c1839..cf5519e7a7d 100644 --- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/client/TestXmlParsing.java +++ 
b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/client/TestXmlParsing.java @@ -25,8 +25,6 @@ import static org.mockito.Mockito.when; import java.io.IOException; import javax.xml.bind.UnmarshalException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.rest.Constants; import org.apache.hadoop.hbase.rest.model.StorageClusterVersionModel; @@ -34,13 +32,15 @@ import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.util.StringUtils; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Test class for {@link RemoteAdmin} to verify XML is parsed in a certain manner. */ @Category(SmallTests.class) public class TestXmlParsing { - private static final Log LOG = LogFactory.getLog(TestXmlParsing.class); + private static final Logger LOG = LoggerFactory.getLogger(TestXmlParsing.class); @Test public void testParsingClusterVersion() throws Exception { diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestTableSchemaModel.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestTableSchemaModel.java index c41128d7375..81de4361bba 100644 --- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestTableSchemaModel.java +++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestTableSchemaModel.java @@ -23,16 +23,16 @@ import java.util.Iterator; import javax.xml.bind.JAXBContext; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.testclassification.RestTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({RestTests.class, SmallTests.class}) public class TestTableSchemaModel extends TestModelBase { - private static final Log LOG = LogFactory.getLog(TestTableSchemaModel.class); + private static final Logger LOG = LoggerFactory.getLogger(TestTableSchemaModel.class); public static final String TABLE_NAME = "testTable"; private static final boolean IS_META = false; diff --git a/hbase-rsgroup/pom.xml b/hbase-rsgroup/pom.xml index de5b9496747..123863a87b3 100644 --- a/hbase-rsgroup/pom.xml +++ b/hbase-rsgroup/pom.xml @@ -124,8 +124,8 @@ commons-lang3 - commons-logging - commons-logging + org.slf4j + slf4j-api org.apache.hbase.thirdparty diff --git a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.java b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.java index 433f7bc6bc0..c6e9bc67ac9 100644 --- a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.java +++ b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.java @@ -29,8 +29,7 @@ import java.util.stream.Collectors; import com.google.protobuf.RpcCallback; import com.google.protobuf.RpcController; import com.google.protobuf.Service; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; + import org.apache.hadoop.hbase.CoprocessorEnvironment; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.NamespaceDescriptor; @@ -78,12 +77,14 @@ import org.apache.hadoop.hbase.protobuf.generated.RSGroupAdminProtos.RemoveServe import org.apache.hadoop.hbase.protobuf.generated.TableProtos; import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; // TODO: Encapsulate MasterObserver functions into separate subclass. @CoreCoprocessor @InterfaceAudience.Private public class RSGroupAdminEndpoint implements MasterCoprocessor, MasterObserver { - private static final Log LOG = LogFactory.getLog(RSGroupAdminEndpoint.class); + private static final Logger LOG = LoggerFactory.getLogger(RSGroupAdminEndpoint.class); private MasterServices master = null; // Only instance of RSGroupInfoManager. RSGroup aware load balancers ask for this instance on diff --git a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminServer.java b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminServer.java index 45421e325b1..b4d35e306c8 100644 --- a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminServer.java +++ b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminServer.java @@ -29,8 +29,6 @@ import java.util.Map; import java.util.Set; import org.apache.commons.lang3.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.NamespaceDescriptor; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.TableName; @@ -48,13 +46,15 @@ import org.apache.hadoop.hbase.net.Address; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Service to support Region Server Grouping (HBase-6721). 
*/ @InterfaceAudience.Private public class RSGroupAdminServer implements RSGroupAdmin { - private static final Log LOG = LogFactory.getLog(RSGroupAdminServer.class); + private static final Logger LOG = LoggerFactory.getLogger(RSGroupAdminServer.class); private MasterServices master; private final RSGroupInfoManager rsGroupInfoManager; diff --git a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupBasedLoadBalancer.java b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupBasedLoadBalancer.java index d838edb94c2..60af99321eb 100644 --- a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupBasedLoadBalancer.java +++ b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupBasedLoadBalancer.java @@ -29,8 +29,7 @@ import java.util.List; import java.util.Map; import java.util.Set; import java.util.TreeMap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; + import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.ClusterStatus; import org.apache.hadoop.hbase.HBaseIOException; @@ -52,6 +51,8 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps; import org.apache.hadoop.util.ReflectionUtils; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * GroupBasedLoadBalancer, used when Region Server Grouping is configured (HBase-6721) @@ -69,7 +70,7 @@ import org.apache.yetus.audience.InterfaceAudience; */ @InterfaceAudience.Private public class RSGroupBasedLoadBalancer implements RSGroupableBalancer { - private static final Log LOG = LogFactory.getLog(RSGroupBasedLoadBalancer.class); + private static final Logger LOG = LoggerFactory.getLogger(RSGroupBasedLoadBalancer.class); private Configuration config; private ClusterStatus clusterStatus; diff --git a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.java b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.java index 4d8ff926091..67dfde729f4 100644 --- a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.java +++ b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.java @@ -34,8 +34,6 @@ import java.util.SortedSet; import java.util.TreeSet; import java.util.concurrent.atomic.AtomicBoolean; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; @@ -81,7 +79,8 @@ import org.apache.hadoop.hbase.zookeeper.ZKWatcher; import org.apache.hadoop.hbase.zookeeper.ZNodePaths; import org.apache.yetus.audience.InterfaceAudience; import org.apache.zookeeper.KeeperException; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; @@ -116,7 +115,7 @@ import com.google.protobuf.ServiceException; */ @InterfaceAudience.Private class RSGroupInfoManagerImpl implements RSGroupInfoManager { - private static final Log LOG = LogFactory.getLog(RSGroupInfoManagerImpl.class); + private static final Logger LOG = LoggerFactory.getLogger(RSGroupInfoManagerImpl.class); /** Table descriptor for hbase:rsgroup catalog table */ 
private final static HTableDescriptor RSGROUP_TABLE_DESC; @@ -624,7 +623,7 @@ class RSGroupInfoManagerImpl implements RSGroupInfoManager { * done asynchronously in this thread. */ private class ServerEventsListenerThread extends Thread implements ServerListener { - private final Log LOG = LogFactory.getLog(ServerEventsListenerThread.class); + private final Logger LOG = LoggerFactory.getLogger(ServerEventsListenerThread.class); private boolean changed = false; ServerEventsListenerThread() { @@ -738,7 +737,7 @@ class RSGroupInfoManagerImpl implements RSGroupInfoManager { } private class RSGroupStartupWorker extends Thread { - private final Log LOG = LogFactory.getLog(RSGroupStartupWorker.class); + private final Logger LOG = LoggerFactory.getLogger(RSGroupStartupWorker.class); private volatile boolean online = false; RSGroupStartupWorker() { diff --git a/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/master/balancer/TestRSGroupBasedLoadBalancer.java b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/master/balancer/TestRSGroupBasedLoadBalancer.java index 5ce0c09f67a..797022c169a 100644 --- a/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/master/balancer/TestRSGroupBasedLoadBalancer.java +++ b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/master/balancer/TestRSGroupBasedLoadBalancer.java @@ -36,8 +36,6 @@ import java.util.TreeMap; import java.util.TreeSet; import org.apache.commons.lang3.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HTableDescriptor; @@ -63,7 +61,8 @@ import org.junit.experimental.categories.Category; import org.mockito.Mockito; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; @@ -71,7 +70,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; @Category(SmallTests.class) public class TestRSGroupBasedLoadBalancer { - private static final Log LOG = LogFactory.getLog(TestRSGroupBasedLoadBalancer.class); + private static final Logger LOG = LoggerFactory.getLogger(TestRSGroupBasedLoadBalancer.class); private static RSGroupBasedLoadBalancer loadBalancer; private static SecureRandom rand; diff --git a/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroups.java b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroups.java index f2ae112c602..378c1ab0a99 100644 --- a/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroups.java +++ b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroups.java @@ -24,8 +24,6 @@ import static org.junit.Assert.fail; import java.io.IOException; import java.util.Iterator; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HConstants; @@ -53,12 +51,13 @@ import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; @Category({MediumTests.class}) public class 
TestRSGroups extends TestRSGroupsBase { - protected static final Log LOG = LogFactory.getLog(TestRSGroups.class); + protected static final Logger LOG = LoggerFactory.getLogger(TestRSGroups.class); private static HMaster master; private static boolean INIT = false; private static RSGroupAdminEndpoint rsGroupAdminEndpoint; diff --git a/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsBase.java b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsBase.java index bbcf1206af2..cd8c3869e5e 100644 --- a/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsBase.java +++ b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsBase.java @@ -34,8 +34,6 @@ import java.util.Map; import java.util.Set; import java.util.TreeMap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.ClusterStatus; import org.apache.hadoop.hbase.ClusterStatus.Option; import org.apache.hadoop.hbase.HBaseCluster; @@ -59,7 +57,8 @@ import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TestName; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; @@ -67,7 +66,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos; import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetServerInfoRequest; public abstract class TestRSGroupsBase { - protected static final Log LOG = LogFactory.getLog(TestRSGroupsBase.class); + protected static final Logger LOG = LoggerFactory.getLogger(TestRSGroupsBase.class); @Rule public TestName name = new TestName(); diff --git a/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsOfflineMode.java b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsOfflineMode.java index 6f7b47d554b..dd190fe1f95 100644 --- a/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsOfflineMode.java +++ b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsOfflineMode.java @@ -17,9 +17,9 @@ */ package org.apache.hadoop.hbase.rsgroup; -import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; + import org.apache.hadoop.hbase.HBaseCluster; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; @@ -41,11 +41,10 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; - +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; // This tests that GroupBasedBalancer will use data in zk to do balancing during master startup. // This does not test retain assignment. @@ -56,7 +55,7 @@ import static org.junit.Assert.assertFalse; // assignment with a timeout. 
@Category(MediumTests.class) public class TestRSGroupsOfflineMode { - private static final Log LOG = LogFactory.getLog(TestRSGroupsOfflineMode.class); + private static final Logger LOG = LoggerFactory.getLogger(TestRSGroupsOfflineMode.class); private static HMaster master; private static Admin hbaseAdmin; private static HBaseTestingUtility TEST_UTIL; diff --git a/hbase-server/pom.xml b/hbase-server/pom.xml index 61be98a4322..01fc4d61bf8 100644 --- a/hbase-server/pom.xml +++ b/hbase-server/pom.xml @@ -405,7 +405,6 @@ org.apache.hbase hbase-resource-bundle - ${project.version} true @@ -492,13 +491,17 @@ commons-lang3 - commons-logging - commons-logging + org.slf4j + slf4j-api org.apache.commons commons-math3 + + org.slf4j + slf4j-log4j12 + log4j log4j diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/HealthCheckChore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/HealthCheckChore.java index 5f3531d2e09..8ab139f63cb 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/HealthCheckChore.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/HealthCheckChore.java @@ -17,17 +17,17 @@ */ package org.apache.hadoop.hbase; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HealthChecker.HealthCheckerExitStatus; import org.apache.hadoop.util.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * The Class HealthCheckChore for running health checker regularly. */ public class HealthCheckChore extends ScheduledChore { - private static final Log LOG = LogFactory.getLog(HealthCheckChore.class); + private static final Logger LOG = LoggerFactory.getLogger(HealthCheckChore.class); private HealthChecker healthChecker; private Configuration config; private int threshold; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/HealthChecker.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/HealthChecker.java index 45e0f3aff9c..a43a51d8656 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/HealthChecker.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/HealthChecker.java @@ -20,10 +20,10 @@ package org.apache.hadoop.hbase; import java.io.IOException; import java.util.ArrayList; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.util.Shell.ExitCodeException; import org.apache.hadoop.util.Shell.ShellCommandExecutor; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A utility for executing an external script that checks the health of @@ -33,7 +33,7 @@ import org.apache.hadoop.util.Shell.ShellCommandExecutor; */ class HealthChecker { - private static final Log LOG = LogFactory.getLog(HealthChecker.class); + private static final Logger LOG = LoggerFactory.getLogger(HealthChecker.class); private ShellCommandExecutor shexec = null; private String exceptionStackTrace; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/JMXListener.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/JMXListener.java index 14720570271..6fdc77e34d8 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/JMXListener.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/JMXListener.java @@ -19,11 +19,12 @@ package org.apache.hadoop.hbase; import com.google.protobuf.Service; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; + import org.apache.hadoop.conf.Configuration; import 
org.apache.hadoop.hbase.CoprocessorEnvironment; import org.apache.hadoop.hbase.coprocessor.*; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.io.IOException; import java.lang.management.ManagementFactory; @@ -49,7 +50,7 @@ import javax.management.remote.rmi.RMIConnectorServer; * 3)support subset of SSL (with default configuration) */ public class JMXListener implements MasterCoprocessor, RegionServerCoprocessor { - private static final Log LOG = LogFactory.getLog(JMXListener.class); + private static final Logger LOG = LoggerFactory.getLogger(JMXListener.class); public static final String RMI_REGISTRY_PORT_CONF_KEY = ".rmi.registry.port"; public static final String RMI_CONNECTOR_PORT_CONF_KEY = ".rmi.connector.port"; public static final int defMasterRMIRegistryPort = 10101; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/LocalHBaseCluster.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/LocalHBaseCluster.java index e43d33bb0e3..06199f72d63 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/LocalHBaseCluster.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/LocalHBaseCluster.java @@ -24,9 +24,9 @@ import java.util.ArrayList; import java.util.Collections; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Connection; @@ -58,7 +58,7 @@ import org.apache.hadoop.hbase.util.JVMClusterUtil; */ @InterfaceAudience.Public public class LocalHBaseCluster { - private static final Log LOG = LogFactory.getLog(LocalHBaseCluster.class); + private static final Logger LOG = LoggerFactory.getLogger(LocalHBaseCluster.class); private final List masterThreads = new CopyOnWriteArrayList<>(); private final List regionThreads = new CopyOnWriteArrayList<>(); private final static int DEFAULT_NO = 1; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ZKNamespaceManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ZKNamespaceManager.java index 9e228ad40c8..e088751a1a6 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ZKNamespaceManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ZKNamespaceManager.java @@ -18,25 +18,25 @@ package org.apache.hadoop.hbase; -import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.yetus.audience.InterfaceAudience; -import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; -import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos; -import org.apache.hadoop.hbase.util.Bytes; -import org.apache.hadoop.hbase.zookeeper.ZKUtil; -import org.apache.hadoop.hbase.zookeeper.ZNodePaths; -import org.apache.hadoop.hbase.zookeeper.ZKListener; -import org.apache.hadoop.hbase.zookeeper.ZKWatcher; -import org.apache.zookeeper.KeeperException; - import java.io.IOException; import java.util.List; import java.util.NavigableMap; import java.util.NavigableSet; import java.util.concurrent.ConcurrentSkipListMap; +import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.zookeeper.ZKListener; +import org.apache.hadoop.hbase.zookeeper.ZKUtil; +import org.apache.hadoop.hbase.zookeeper.ZKWatcher; +import org.apache.hadoop.hbase.zookeeper.ZNodePaths; 
+import org.apache.yetus.audience.InterfaceAudience; +import org.apache.zookeeper.KeeperException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; +import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; +import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos; /** * Class servers two purposes: @@ -49,7 +49,7 @@ import java.util.concurrent.ConcurrentSkipListMap; */ @InterfaceAudience.Private public class ZKNamespaceManager extends ZKListener { - private static final Log LOG = LogFactory.getLog(ZKNamespaceManager.class); + private static final Logger LOG = LoggerFactory.getLogger(ZKNamespaceManager.class); private final String nsZNode; private final NavigableMap cache; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ZNodeClearer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ZNodeClearer.java index cda5affac4f..3e911a8dd62 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ZNodeClearer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ZNodeClearer.java @@ -26,8 +26,6 @@ import java.io.FileReader; import java.io.FileWriter; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer; import org.apache.hadoop.hbase.zookeeper.MasterAddressTracker; @@ -35,6 +33,8 @@ import org.apache.hadoop.hbase.zookeeper.ZKUtil; import org.apache.hadoop.hbase.zookeeper.ZKWatcher; import org.apache.hadoop.hbase.zookeeper.ZNodePaths; import org.apache.zookeeper.KeeperException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** *
Contains a set of methods for the collaboration between the start/stop scripts and the @@ -49,7 +49,7 @@ import org.apache.zookeeper.KeeperException; * check its content to make sure that the backup server is not now in charge.
*/ public class ZNodeClearer { - private static final Log LOG = LogFactory.getLog(ZNodeClearer.class); + private static final Logger LOG = LoggerFactory.getLogger(ZNodeClearer.class); private ZNodeClearer() {} diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/HFileArchiver.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/HFileArchiver.java index 4da1235fd46..354a63caa66 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/HFileArchiver.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/HFileArchiver.java @@ -25,8 +25,6 @@ import java.util.Collection; import java.util.Collections; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -41,7 +39,8 @@ import org.apache.hadoop.hbase.util.FSUtils; import org.apache.hadoop.hbase.util.HFileArchiveUtil; import org.apache.hadoop.io.MultipleIOException; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.base.Function; import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Collections2; @@ -54,7 +53,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; */ @InterfaceAudience.Private public class HFileArchiver { - private static final Log LOG = LogFactory.getLog(HFileArchiver.class); + private static final Logger LOG = LoggerFactory.getLogger(HFileArchiver.class); private static final String SEPARATOR = "."; /** Number of retries in case of fs operation failure */ diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/example/HFileArchiveManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/example/HFileArchiveManager.java index 389dea7fc0d..c51d4937a14 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/example/HFileArchiveManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/example/HFileArchiveManager.java @@ -19,8 +19,6 @@ package org.apache.hadoop.hbase.backup.example; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.ZooKeeperConnectionException; import org.apache.hadoop.hbase.zookeeper.ZKWatcher; @@ -30,6 +28,8 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.zookeeper.ZKUtil; import org.apache.hadoop.hbase.zookeeper.ZNodePaths; import org.apache.zookeeper.KeeperException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Client-side manager for which table's hfiles should be preserved for long-term archive. 
@@ -41,7 +41,7 @@ import org.apache.zookeeper.KeeperException; class HFileArchiveManager { private final String archiveZnode; - private static final Log LOG = LogFactory.getLog(HFileArchiveManager.class); + private static final Logger LOG = LoggerFactory.getLogger(HFileArchiveManager.class); private final ZKWatcher zooKeeper; private volatile boolean stopped = false; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/example/HFileArchiveTableMonitor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/example/HFileArchiveTableMonitor.java index 3a1653417dd..93c9690d790 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/example/HFileArchiveTableMonitor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/example/HFileArchiveTableMonitor.java @@ -21,8 +21,8 @@ import java.util.List; import java.util.Set; import java.util.TreeSet; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Monitor the actual tables for which HFiles are archived for long-term retention (always kept @@ -31,7 +31,7 @@ import org.apache.commons.logging.LogFactory; * It is internally synchronized to ensure consistent view of the table state. */ public class HFileArchiveTableMonitor { - private static final Log LOG = LogFactory.getLog(HFileArchiveTableMonitor.class); + private static final Logger LOG = LoggerFactory.getLogger(HFileArchiveTableMonitor.class); private final Set archivedTables = new TreeSet<>(); /** diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/example/LongTermArchivingHFileCleaner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/example/LongTermArchivingHFileCleaner.java index ff7a51d0a8c..484ff5ea174 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/example/LongTermArchivingHFileCleaner.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/example/LongTermArchivingHFileCleaner.java @@ -19,8 +19,6 @@ package org.apache.hadoop.hbase.backup.example; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; @@ -30,6 +28,8 @@ import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.master.cleaner.BaseHFileCleanerDelegate; import org.apache.hadoop.hbase.util.FSUtils; import org.apache.zookeeper.KeeperException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * {@link BaseHFileCleanerDelegate} that only cleans HFiles that don't belong to a table that is @@ -44,7 +44,7 @@ import org.apache.zookeeper.KeeperException; @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG) public class LongTermArchivingHFileCleaner extends BaseHFileCleanerDelegate { - private static final Log LOG = LogFactory.getLog(LongTermArchivingHFileCleaner.class); + private static final Logger LOG = LoggerFactory.getLogger(LongTermArchivingHFileCleaner.class); TableHFileArchiveTracker archiveTracker; private FileSystem fs; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/example/TableHFileArchiveTracker.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/example/TableHFileArchiveTracker.java index 73b50a652d9..1b3b775b2a4 100644 --- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/example/TableHFileArchiveTracker.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/example/TableHFileArchiveTracker.java @@ -20,8 +20,6 @@ package org.apache.hadoop.hbase.backup.example; import java.io.IOException; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.zookeeper.ZKWatcher; import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.conf.Configuration; @@ -29,6 +27,8 @@ import org.apache.hadoop.hbase.ZooKeeperConnectionException; import org.apache.hadoop.hbase.zookeeper.ZKUtil; import org.apache.hadoop.hbase.zookeeper.ZKListener; import org.apache.zookeeper.KeeperException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Track HFile archiving state changes in ZooKeeper. Keeps track of the tables whose HFiles should @@ -39,7 +39,7 @@ import org.apache.zookeeper.KeeperException; */ @InterfaceAudience.Private public class TableHFileArchiveTracker extends ZKListener { - private static final Log LOG = LogFactory.getLog(TableHFileArchiveTracker.class); + private static final Logger LOG = LoggerFactory.getLogger(TableHFileArchiveTracker.class); public static final String HFILE_ARCHIVE_ZNODE_PARENT = "hfilearchive"; private HFileArchiveTableMonitor monitor; private String archiveHFileZNode; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/ClientSideRegionScanner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/ClientSideRegionScanner.java index 529a2f93d31..7a1a57814f2 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/ClientSideRegionScanner.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/ClientSideRegionScanner.java @@ -22,8 +22,6 @@ import java.io.IOException; import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -34,6 +32,8 @@ import org.apache.hadoop.hbase.client.metrics.ScanMetrics; import org.apache.hadoop.hbase.regionserver.HRegion; import org.apache.hadoop.hbase.regionserver.RegionScanner; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A client scanner for a region opened for read-only on the client side. 
Assumes region data @@ -42,7 +42,7 @@ import org.apache.yetus.audience.InterfaceAudience; @InterfaceAudience.Private public class ClientSideRegionScanner extends AbstractClientScanner { - private static final Log LOG = LogFactory.getLog(ClientSideRegionScanner.class); + private static final Logger LOG = LoggerFactory.getLogger(ClientSideRegionScanner.class); private HRegion region; RegionScanner scanner; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/TableSnapshotScanner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/TableSnapshotScanner.java index 36b2bb282d1..93b1a4024c2 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/TableSnapshotScanner.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/TableSnapshotScanner.java @@ -24,8 +24,6 @@ import java.util.Collections; import java.util.List; import java.util.UUID; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -34,6 +32,8 @@ import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.snapshot.RestoreSnapshotHelper; import org.apache.hadoop.hbase.util.FSUtils; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A Scanner which performs a scan over snapshot files. Using this class requires copying the @@ -65,7 +65,7 @@ import org.apache.yetus.audience.InterfaceAudience; @InterfaceAudience.Private public class TableSnapshotScanner extends AbstractClientScanner { - private static final Log LOG = LogFactory.getLog(TableSnapshotScanner.class); + private static final Logger LOG = LoggerFactory.getLogger(TableSnapshotScanner.class); private Configuration conf; private String snapshotName; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/locking/EntityLock.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/locking/EntityLock.java index 94a573c6383..61a0238bca8 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/locking/EntityLock.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/locking/EntityLock.java @@ -23,11 +23,11 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Abortable; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest; import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse; @@ -81,7 +81,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe */ @InterfaceAudience.Public public class EntityLock { - private static final Log LOG = LogFactory.getLog(EntityLock.class); + private static final Logger LOG = LoggerFactory.getLogger(EntityLock.class); public static final String HEARTBEAT_TIME_BUFFER = "hbase.client.locks.heartbeat.time.buffer.ms"; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/conf/ConfigurationManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/conf/ConfigurationManager.java index 
1fa70f48cef..2bbb90bbf6a 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/conf/ConfigurationManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/conf/ConfigurationManager.java @@ -17,11 +17,11 @@ */ package org.apache.hadoop.hbase.conf; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceStability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.util.Collections; import java.util.Set; @@ -73,7 +73,7 @@ import java.util.WeakHashMap; @InterfaceAudience.Private @InterfaceStability.Evolving public class ConfigurationManager { - private static final Log LOG = LogFactory.getLog(ConfigurationManager.class); + private static final Logger LOG = LoggerFactory.getLogger(ConfigurationManager.class); // The set of Configuration Observers. These classes would like to get // notified when the configuration is reloaded from disk. This is a set diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/constraint/ConstraintProcessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/constraint/ConstraintProcessor.java index 582fabf4eb4..6aa5d977b67 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/constraint/ConstraintProcessor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/constraint/ConstraintProcessor.java @@ -22,9 +22,9 @@ import java.util.ArrayList; import java.util.List; import java.util.Optional; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CoprocessorEnvironment; import org.apache.hadoop.hbase.client.Put; @@ -46,7 +46,7 @@ import org.apache.hadoop.hbase.wal.WALEdit; @InterfaceAudience.Private public class ConstraintProcessor implements RegionCoprocessor, RegionObserver { - private static final Log LOG = LogFactory.getLog(ConstraintProcessor.class); + private static final Logger LOG = LoggerFactory.getLogger(ConstraintProcessor.class); private final ClassLoader classloader; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/constraint/Constraints.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/constraint/Constraints.java index e675cc9c4a8..426e5161530 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/constraint/Constraints.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/constraint/Constraints.java @@ -29,9 +29,9 @@ import java.util.Map; import java.util.Map.Entry; import java.util.regex.Pattern; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.client.TableDescriptor; @@ -53,7 +53,7 @@ public final class Constraints { private Constraints() { } - private static final Log LOG = LogFactory.getLog(Constraints.class); + private static final Logger LOG = LoggerFactory.getLogger(Constraints.class); private static final String CONSTRAINT_HTD_KEY_PREFIX = "constraint $"; private static final Pattern CONSTRAINT_HTD_ATTR_KEY_PATTERN = Pattern .compile(CONSTRAINT_HTD_KEY_PREFIX, Pattern.LITERAL); diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/coordination/ZKSplitLogManagerCoordination.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coordination/ZKSplitLogManagerCoordination.java index 8a07b4b7c13..0cf23c9120d 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coordination/ZKSplitLogManagerCoordination.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coordination/ZKSplitLogManagerCoordination.java @@ -29,8 +29,6 @@ import static org.apache.hadoop.hbase.util.CollectionUtils.computeIfAbsent; import java.io.IOException; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.ServerName; @@ -38,6 +36,7 @@ import org.apache.hadoop.hbase.SplitLogCounters; import org.apache.hadoop.hbase.SplitLogTask; import org.apache.hadoop.hbase.coordination.ZKSplitLogManagerCoordination.TaskFinisher.Status; import org.apache.hadoop.hbase.exceptions.DeserializationException; +import org.apache.hadoop.hbase.log.HBaseMarkers; import org.apache.hadoop.hbase.master.SplitLogManager.ResubmitDirective; import org.apache.hadoop.hbase.master.SplitLogManager.Task; import org.apache.hadoop.hbase.master.SplitLogManager.TerminationStatus; @@ -57,7 +56,8 @@ import org.apache.zookeeper.KeeperException; import org.apache.zookeeper.KeeperException.NoNodeException; import org.apache.zookeeper.ZooDefs.Ids; import org.apache.zookeeper.data.Stat; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** @@ -72,7 +72,7 @@ public class ZKSplitLogManagerCoordination extends ZKListener implements public static final int DEFAULT_ZK_RETRIES = 3; public static final int DEFAULT_MAX_RESUBMIT = 3; - private static final Log LOG = LogFactory.getLog(SplitLogManagerCoordination.class); + private static final Logger LOG = LoggerFactory.getLogger(SplitLogManagerCoordination.class); private final TaskFinisher taskFinisher; private final Configuration conf; @@ -301,7 +301,7 @@ public class ZKSplitLogManagerCoordination extends ZKListener implements } private void createRescanFailure() { - LOG.fatal("logic failure, rescan failure must not happen"); + LOG.error(HBaseMarkers.FATAL, "logic failure, rescan failure must not happen"); } /** @@ -353,7 +353,7 @@ public class ZKSplitLogManagerCoordination extends ZKListener implements return; } SplitLogCounters.tot_mgr_null_data.increment(); - LOG.fatal("logic error - got null data " + path); + LOG.error(HBaseMarkers.FATAL, "logic error - got null data " + path); setDone(path, FAILURE); return; } @@ -382,8 +382,8 @@ public class ZKSplitLogManagerCoordination extends ZKListener implements LOG.info("task " + path + " entered state: " + slt.toString()); resubmitOrFail(path, CHECK); } else { - LOG.fatal("logic error - unexpected zk state for path = " + path + " data = " - + slt.toString()); + LOG.error(HBaseMarkers.FATAL, "logic error - unexpected zk state for path = " + + path + " data = " + slt.toString()); setDone(path, FAILURE); } } @@ -573,7 +573,7 @@ public class ZKSplitLogManagerCoordination extends ZKListener implements * Asynchronous handler for zk create node results. Retries on failures. 
*/ public class CreateAsyncCallback implements AsyncCallback.StringCallback { - private final Log LOG = LogFactory.getLog(CreateAsyncCallback.class); + private final Logger LOG = LoggerFactory.getLogger(CreateAsyncCallback.class); @Override public void processResult(int rc, String path, Object ctx, String name) { @@ -614,7 +614,7 @@ public class ZKSplitLogManagerCoordination extends ZKListener implements * Asynchronous handler for zk get-data-set-watch on node results. Retries on failures. */ public class GetDataAsyncCallback implements AsyncCallback.DataCallback { - private final Log LOG = LogFactory.getLog(GetDataAsyncCallback.class); + private final Logger LOG = LoggerFactory.getLogger(GetDataAsyncCallback.class); @Override public void processResult(int rc, String path, Object ctx, byte[] data, Stat stat) { @@ -662,7 +662,7 @@ public class ZKSplitLogManagerCoordination extends ZKListener implements * Asynchronous handler for zk delete node results. Retries on failures. */ public class DeleteAsyncCallback implements AsyncCallback.VoidCallback { - private final Log LOG = LogFactory.getLog(DeleteAsyncCallback.class); + private final Logger LOG = LoggerFactory.getLogger(DeleteAsyncCallback.class); @Override public void processResult(int rc, String path, Object ctx) { @@ -704,7 +704,7 @@ public class ZKSplitLogManagerCoordination extends ZKListener implements * {@link org.apache.hadoop.hbase.regionserver.SplitLogWorker}s to rescan for new tasks. */ public class CreateRescanAsyncCallback implements AsyncCallback.StringCallback { - private final Log LOG = LogFactory.getLog(CreateRescanAsyncCallback.class); + private final Logger LOG = LoggerFactory.getLogger(CreateRescanAsyncCallback.class); @Override public void processResult(int rc, String path, Object ctx, String name) { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coordination/ZkSplitLogWorkerCoordination.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coordination/ZkSplitLogWorkerCoordination.java index 0540a8f8580..bcba101b313 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coordination/ZkSplitLogWorkerCoordination.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coordination/ZkSplitLogWorkerCoordination.java @@ -26,8 +26,6 @@ import java.util.concurrent.atomic.LongAdder; import org.apache.commons.lang3.RandomUtils; import org.apache.commons.lang3.mutable.MutableInt; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -36,6 +34,7 @@ import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.SplitLogCounters; import org.apache.hadoop.hbase.SplitLogTask; import org.apache.hadoop.hbase.exceptions.DeserializationException; +import org.apache.hadoop.hbase.log.HBaseMarkers; import org.apache.hadoop.hbase.regionserver.RegionServerServices; import org.apache.hadoop.hbase.regionserver.SplitLogWorker; import org.apache.hadoop.hbase.regionserver.SplitLogWorker.TaskExecutor; @@ -54,6 +53,8 @@ import org.apache.yetus.audience.InterfaceAudience; import org.apache.zookeeper.AsyncCallback; import org.apache.zookeeper.KeeperException; import org.apache.zookeeper.data.Stat; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * ZooKeeper based implementation of {@link SplitLogWorkerCoordination} @@ -64,7 +65,7 @@ import org.apache.zookeeper.data.Stat; public class ZkSplitLogWorkerCoordination extends ZKListener 
implements SplitLogWorkerCoordination { - private static final Log LOG = LogFactory.getLog(ZkSplitLogWorkerCoordination.class); + private static final Logger LOG = LoggerFactory.getLogger(ZkSplitLogWorkerCoordination.class); private static final int checkInterval = 5000; // 5 seconds private static final int FAILED_TO_OWN_TASK = -1; @@ -539,7 +540,7 @@ public class ZkSplitLogWorkerCoordination extends ZKListener implements * Asynchronous handler for zk get-data-set-watch on node results. */ class GetDataAsyncCallback implements AsyncCallback.DataCallback { - private final Log LOG = LogFactory.getLog(GetDataAsyncCallback.class); + private final Logger LOG = LoggerFactory.getLogger(GetDataAsyncCallback.class); @Override public void processResult(int rc, String path, Object ctx, byte[] data, Stat stat) { @@ -580,7 +581,7 @@ public class ZkSplitLogWorkerCoordination extends ZKListener implements LOG.warn("transisition task " + task + " to " + slt + " failed because of version mismatch", bve); } catch (KeeperException.NoNodeException e) { - LOG.fatal( + LOG.error(HBaseMarkers.FATAL, "logic error - end task " + task + " " + slt + " failed because task doesn't exist", e); } catch (KeeperException e) { LOG.warn("failed to end task, " + task + " " + slt, e); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseEnvironment.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseEnvironment.java index 9f5ca231f5a..2818dcd675f 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseEnvironment.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseEnvironment.java @@ -19,13 +19,13 @@ package org.apache.hadoop.hbase.coprocessor; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Coprocessor; import org.apache.hadoop.hbase.CoprocessorEnvironment; import org.apache.hadoop.hbase.util.VersionInfo; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.io.IOException; @@ -34,7 +34,7 @@ import java.io.IOException; */ @InterfaceAudience.Private public class BaseEnvironment implements CoprocessorEnvironment { - private static final Log LOG = LogFactory.getLog(BaseEnvironment.class); + private static final Logger LOG = LoggerFactory.getLogger(BaseEnvironment.class); /** The coprocessor */ public C impl; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.java index f2d9b2a8770..9489d69dacb 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.java @@ -32,9 +32,9 @@ import java.util.concurrent.ConcurrentSkipListSet; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Function; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Abortable; @@ -75,7 +75,7 @@ public abstract class CoprocessorHost coprocEnvironments = diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/errorhandling/ForeignExceptionDispatcher.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/errorhandling/ForeignExceptionDispatcher.java index 75117fd6e80..87f7c00064c 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/errorhandling/ForeignExceptionDispatcher.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/errorhandling/ForeignExceptionDispatcher.java @@ -20,9 +20,9 @@ package org.apache.hadoop.hbase.errorhandling; import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * The dispatcher acts as the state holding entity for foreign error handling. The first @@ -40,7 +40,7 @@ import org.apache.yetus.audience.InterfaceAudience; */ @InterfaceAudience.Private public class ForeignExceptionDispatcher implements ForeignExceptionListener, ForeignExceptionSnare { - private static final Log LOG = LogFactory.getLog(ForeignExceptionDispatcher.class); + private static final Logger LOG = LoggerFactory.getLogger(ForeignExceptionDispatcher.class); protected final String name; protected final List listeners = new ArrayList<>(); private ForeignException exception; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/errorhandling/TimeoutExceptionInjector.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/errorhandling/TimeoutExceptionInjector.java index 294e108ebda..36182d677d8 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/errorhandling/TimeoutExceptionInjector.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/errorhandling/TimeoutExceptionInjector.java @@ -20,9 +20,9 @@ package org.apache.hadoop.hbase.errorhandling; import java.util.Timer; import java.util.TimerTask; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; /** @@ -35,7 +35,7 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; @InterfaceAudience.Private public class TimeoutExceptionInjector { - private static final Log LOG = LogFactory.getLog(TimeoutExceptionInjector.class); + private static final Logger LOG = LoggerFactory.getLogger(TimeoutExceptionInjector.class); private final long maxTime; private volatile boolean complete; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/executor/EventHandler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/executor/EventHandler.java index 1056c20453b..eb94744299f 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/executor/EventHandler.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/executor/EventHandler.java @@ -21,14 +21,14 @@ package org.apache.hadoop.hbase.executor; import java.io.IOException; import java.util.concurrent.atomic.AtomicLong; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Server; import org.apache.hadoop.hbase.trace.TraceUtil; import org.apache.htrace.core.Span; import org.apache.htrace.core.TraceScope; import org.apache.htrace.core.Tracer; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Abstract base class for all HBase event handlers. 
Subclasses should @@ -53,7 +53,7 @@ import org.apache.yetus.audience.InterfaceAudience; */ @InterfaceAudience.Private public abstract class EventHandler implements Runnable, Comparable { - private static final Log LOG = LogFactory.getLog(EventHandler.class); + private static final Logger LOG = LoggerFactory.getLogger(EventHandler.class); // type of event this object represents protected EventType eventType; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/executor/ExecutorService.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/executor/ExecutorService.java index 7117d360d1b..4cd800c178c 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/executor/ExecutorService.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/executor/ExecutorService.java @@ -32,9 +32,9 @@ import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.monitoring.ThreadMonitoring; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; @@ -56,7 +56,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFa */ @InterfaceAudience.Private public class ExecutorService { - private static final Log LOG = LogFactory.getLog(ExecutorService.class); + private static final Logger LOG = LoggerFactory.getLogger(ExecutorService.class); // hold the all the executors created in a map addressable by their names private final ConcurrentHashMap executorMap = new ConcurrentHashMap<>(); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/favored/FavoredNodeAssignmentHelper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/favored/FavoredNodeAssignmentHelper.java index 136453a99cf..74f7c69c416 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/favored/FavoredNodeAssignmentHelper.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/favored/FavoredNodeAssignmentHelper.java @@ -32,8 +32,6 @@ import java.util.Random; import java.util.Set; import org.apache.commons.lang3.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Cell.DataType; import org.apache.hadoop.hbase.CellBuilderFactory; @@ -52,6 +50,9 @@ import org.apache.hadoop.hbase.master.RackManager; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; @@ -67,7 +68,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.FavoredNode */ @InterfaceAudience.Private public class FavoredNodeAssignmentHelper { - private static final Log LOG = LogFactory.getLog(FavoredNodeAssignmentHelper.class); + private static final Logger LOG = LoggerFactory.getLogger(FavoredNodeAssignmentHelper.class); private RackManager rackManager; private Map> rackToRegionServerMap; private List uniqueRackList; diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/favored/FavoredNodeLoadBalancer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/favored/FavoredNodeLoadBalancer.java index 68e5e897d35..a2cfa8543a6 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/favored/FavoredNodeLoadBalancer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/favored/FavoredNodeLoadBalancer.java @@ -29,8 +29,6 @@ import java.util.List; import java.util.Map; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseIOException; import org.apache.hadoop.hbase.HBaseInterfaceAudience; @@ -46,7 +44,8 @@ import org.apache.hadoop.hbase.master.SnapshotOfRegionAssignmentFromMeta; import org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer; import org.apache.hadoop.hbase.util.Pair; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; @@ -67,7 +66,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; */ @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG) public class FavoredNodeLoadBalancer extends BaseLoadBalancer implements FavoredNodesPromoter { - private static final Log LOG = LogFactory.getLog(FavoredNodeLoadBalancer.class); + private static final Logger LOG = LoggerFactory.getLogger(FavoredNodeLoadBalancer.class); private RackManager rackManager; private Configuration conf; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/favored/FavoredNodesManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/favored/FavoredNodesManager.java index 7705b3d1b74..e0b9dc5b5d4 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/favored/FavoredNodesManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/favored/FavoredNodesManager.java @@ -32,8 +32,6 @@ import java.util.List; import java.util.Map; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseIOException; import org.apache.hadoop.hbase.ServerName; @@ -45,7 +43,8 @@ import org.apache.hadoop.hdfs.DFSConfigKeys; import org.apache.hadoop.hdfs.HdfsConfiguration; import org.apache.hadoop.net.NetUtils; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps; @@ -64,7 +63,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; @InterfaceAudience.Private public class FavoredNodesManager { - private static final Log LOG = LogFactory.getLog(FavoredNodesManager.class); + private static final Logger LOG = LoggerFactory.getLogger(FavoredNodesManager.class); private FavoredNodesPlan globalFavoredNodesAssignmentPlan; private Map> primaryRSToRegionMap; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/fs/HFileSystem.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/fs/HFileSystem.java index c48d9d679f5..0723f855afe 100644 --- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/fs/HFileSystem.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/fs/HFileSystem.java @@ -31,8 +31,6 @@ import java.lang.reflect.Proxy; import java.lang.reflect.UndeclaredThrowableException; import java.net.URI; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; @@ -54,6 +52,8 @@ import org.apache.hadoop.hdfs.protocol.LocatedBlocks; import org.apache.hadoop.hdfs.server.blockmanagement.BlockStoragePolicySuite; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.util.Progressable; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import edu.umd.cs.findbugs.annotations.Nullable; @@ -63,7 +63,7 @@ import edu.umd.cs.findbugs.annotations.Nullable; * separate filesystem objects for reading and writing hfiles and wals. */ public class HFileSystem extends FilterFileSystem { - public static final Log LOG = LogFactory.getLog(HFileSystem.class); + public static final Logger LOG = LoggerFactory.getLogger(HFileSystem.class); private final FileSystem noChecksumFs; // read hfile data from storage private final boolean useHBaseChecksum; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/FSDataInputStreamWrapper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/FSDataInputStreamWrapper.java index cd3843f12a5..faeac788aea 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/FSDataInputStreamWrapper.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/FSDataInputStreamWrapper.java @@ -24,12 +24,12 @@ import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import org.apache.commons.io.IOUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.fs.HFileSystem; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; @@ -41,7 +41,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe */ @InterfaceAudience.Private public class FSDataInputStreamWrapper implements Closeable { - private static final Log LOG = LogFactory.getLog(FSDataInputStreamWrapper.class); + private static final Logger LOG = LoggerFactory.getLogger(FSDataInputStreamWrapper.class); private static final boolean isLogTraceEnabled = LOG.isTraceEnabled(); private final HFileSystem hfs; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/FileLink.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/FileLink.java index 52597b8297b..42f3483648b 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/FileLink.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/FileLink.java @@ -26,9 +26,9 @@ import java.io.InputStream; import java.io.FileNotFoundException; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.CanSetDropBehind; import org.apache.hadoop.fs.CanSetReadahead; import org.apache.hadoop.fs.FSDataInputStream; @@ -92,7 +92,7 @@ 
import org.apache.hadoop.ipc.RemoteException; */ @InterfaceAudience.Private public class FileLink { - private static final Log LOG = LogFactory.getLog(FileLink.class); + private static final Logger LOG = LoggerFactory.getLogger(FileLink.class); /** Define the Back-reference directory name prefix: .links-<hfile>/ */ public static final String BACK_REFERENCES_DIRECTORY_PREFIX = ".links-"; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/HFileLink.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/HFileLink.java index ee1038608bb..2aebdf0d04e 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/HFileLink.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/HFileLink.java @@ -22,8 +22,6 @@ import java.io.IOException; import java.util.regex.Matcher; import java.util.regex.Pattern; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -38,6 +36,8 @@ import org.apache.hadoop.hbase.util.FSUtils; import org.apache.hadoop.hbase.util.HFileArchiveUtil; import org.apache.hadoop.hbase.util.Pair; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * HFileLink describes a link to an hfile. @@ -58,7 +58,7 @@ import org.apache.yetus.audience.InterfaceAudience; @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="EQ_DOESNT_OVERRIDE_EQUALS", justification="To be fixed but warning suppressed for now") public class HFileLink extends FileLink { - private static final Log LOG = LogFactory.getLog(HFileLink.class); + private static final Logger LOG = LoggerFactory.getLogger(HFileLink.class); /** * A non-capture group, for HFileLink, so that this can be embedded. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/HalfStoreFileReader.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/HalfStoreFileReader.java index 1dfffd6cff4..80207eb73ea 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/HalfStoreFileReader.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/HalfStoreFileReader.java @@ -23,9 +23,9 @@ import java.nio.ByteBuffer; import java.util.Optional; import java.util.concurrent.atomic.AtomicInteger; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -55,7 +55,7 @@ import org.apache.hadoop.hbase.util.Bytes; */ @InterfaceAudience.Private public class HalfStoreFileReader extends StoreFileReader { - private static final Log LOG = LogFactory.getLog(HalfStoreFileReader.class); + private static final Logger LOG = LoggerFactory.getLogger(HalfStoreFileReader.class); final boolean top; // This is the key we split around. Its the first possible entry on a row: // i.e. empty column and a timestamp of LATEST_TIMESTAMP. 
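
The hunks above and below repeat one mechanical substitution: each commons-logging `Log`/`LogFactory` declaration becomes an SLF4J `Logger`/`LoggerFactory` declaration. As a reference for reviewers, here is a minimal sketch of the pattern; the class name and the parameterized debug call are illustrative only and are not part of this patch.

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class LoggerMigrationExample {  // hypothetical class, for illustration only
      // Before: private static final Log LOG = LogFactory.getLog(LoggerMigrationExample.class);
      private static final Logger LOG = LoggerFactory.getLogger(LoggerMigrationExample.class);

      void open(String path, boolean top) {
        // SLF4J parameterized messages defer string building until the level is enabled,
        // which is one incidental benefit of the switch (not applied wholesale in this patch).
        LOG.debug("Opened half store file {} (top={})", path, top);
      }
    }
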
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.java index a7c26e00771..41057f17412 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.java @@ -38,8 +38,6 @@ import java.util.EnumSet; import java.util.List; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.crypto.CryptoProtocolVersion; import org.apache.hadoop.crypto.Encryptor; @@ -85,7 +83,8 @@ import org.apache.hadoop.security.proto.SecurityProtos.TokenProto; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.util.DataChecksum; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.base.Throwables; import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableMap; import org.apache.hadoop.hbase.shaded.io.netty.bootstrap.Bootstrap; @@ -116,8 +115,8 @@ import org.apache.hadoop.hbase.shaded.io.netty.util.concurrent.Promise; */ @InterfaceAudience.Private public final class FanOutOneBlockAsyncDFSOutputHelper { - - private static final Log LOG = LogFactory.getLog(FanOutOneBlockAsyncDFSOutputHelper.class); + private static final Logger LOG = + LoggerFactory.getLogger(FanOutOneBlockAsyncDFSOutputHelper.class); private FanOutOneBlockAsyncDFSOutputHelper() { } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.java index 458df27ac2a..20c8da742cd 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.java @@ -74,8 +74,6 @@ import javax.security.sasl.SaslException; import org.apache.commons.codec.binary.Base64; import org.apache.commons.lang3.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.crypto.CipherOption; import org.apache.hadoop.crypto.CipherSuite; @@ -85,6 +83,9 @@ import org.apache.hadoop.crypto.Encryptor; import org.apache.hadoop.crypto.key.KeyProvider.KeyVersion; import org.apache.hadoop.fs.FileEncryptionInfo; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import com.google.protobuf.ByteString; import org.apache.hadoop.hdfs.DFSClient; import org.apache.hadoop.hdfs.protocol.DatanodeInfo; @@ -107,8 +108,8 @@ import org.apache.hadoop.security.token.Token; */ @InterfaceAudience.Private public final class FanOutOneBlockAsyncDFSOutputSaslHelper { - - private static final Log LOG = LogFactory.getLog(FanOutOneBlockAsyncDFSOutputSaslHelper.class); + private static final Logger LOG = + LoggerFactory.getLogger(FanOutOneBlockAsyncDFSOutputSaslHelper.class); private FanOutOneBlockAsyncDFSOutputSaslHelper() { } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java index a071fbdc89b..242463cd65f 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java @@ -22,11 +22,11 @@ import static org.apache.hadoop.hbase.HConstants.BUCKET_CACHE_SIZE_KEY; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HConstants; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor; import org.apache.hadoop.hbase.io.hfile.BlockType.BlockCategory; import org.apache.hadoop.hbase.io.hfile.bucket.BucketCache; @@ -42,7 +42,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe */ @InterfaceAudience.Private public class CacheConfig { - private static final Log LOG = LogFactory.getLog(CacheConfig.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(CacheConfig.class.getName()); /** diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/ChecksumUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/ChecksumUtil.java index 8342788ea49..5eb182640fa 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/ChecksumUtil.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/ChecksumUtil.java @@ -21,10 +21,10 @@ import java.io.ByteArrayOutputStream; import java.io.IOException; import java.nio.ByteBuffer; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.ChecksumException; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.util.ChecksumType; import org.apache.hadoop.util.DataChecksum; @@ -33,7 +33,7 @@ import org.apache.hadoop.util.DataChecksum; */ @InterfaceAudience.Private public class ChecksumUtil { - public static final Log LOG = LogFactory.getLog(ChecksumUtil.class); + public static final Logger LOG = LoggerFactory.getLogger(ChecksumUtil.class); /** This is used to reserve space in a byte buffer */ private static byte[] DUMMY_VALUE = new byte[128 * HFileBlock.CHECKSUM_SIZE]; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CompoundBloomFilterWriter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CompoundBloomFilterWriter.java index 7e5db088ffa..0b58b215617 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CompoundBloomFilterWriter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CompoundBloomFilterWriter.java @@ -23,14 +23,14 @@ import java.io.IOException; import java.util.LinkedList; import java.util.Queue; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.regionserver.BloomType; import org.apache.hadoop.hbase.util.BloomFilterChunk; import org.apache.hadoop.hbase.util.BloomFilterUtil; @@ 
-47,8 +47,8 @@ import org.apache.hadoop.io.Writable; public class CompoundBloomFilterWriter extends CompoundBloomFilterBase implements BloomFilterWriter, InlineBlockWriter { - private static final Log LOG = - LogFactory.getLog(CompoundBloomFilterWriter.class); + private static final Logger LOG = + LoggerFactory.getLogger(CompoundBloomFilterWriter.class); /** The current chunk being written to */ private BloomFilterChunk chunk; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java index d63c1202127..e0c2c796b4e 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java @@ -38,8 +38,6 @@ import java.util.TreeMap; import java.util.concurrent.atomic.LongAdder; import org.apache.commons.io.IOUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; @@ -51,6 +49,8 @@ import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.HConstants; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.fs.HFileSystem; import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper; import org.apache.hadoop.hbase.io.MetricsIO; @@ -140,7 +140,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; @InterfaceAudience.Private public class HFile { // LOG is being used in HFileBlock and CheckSumUtil - static final Log LOG = LogFactory.getLog(HFile.class); + static final Logger LOG = LoggerFactory.getLogger(HFile.class); /** * Maximum length of key in HFile. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java index c6c7446f3fc..39ba6cd6d3c 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java @@ -27,14 +27,14 @@ import java.util.concurrent.atomic.AtomicReference; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.HConstants; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.fs.HFileSystem; import org.apache.hadoop.hbase.io.ByteArrayOutputStream; import org.apache.hadoop.hbase.io.ByteBuffInputStream; @@ -110,7 +110,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; */ @InterfaceAudience.Private public class HFileBlock implements Cacheable { - private static final Log LOG = LogFactory.getLog(HFileBlock.class); + private static final Logger LOG = LoggerFactory.getLogger(HFileBlock.class); // Block Header fields. 
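
SLF4J has no FATAL level, so the former `LOG.fatal(...)` calls changed in the ZKSplitLogManagerCoordination and ZkSplitLogWorkerCoordination hunks above become error-level calls tagged with `HBaseMarkers.FATAL`. A minimal sketch of that indirection follows, assuming the marker is a thin wrapper around SLF4J's `MarkerFactory`; the real `org.apache.hadoop.hbase.log.HBaseMarkers` class is introduced elsewhere in this change set and may differ in detail.

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;
    import org.slf4j.Marker;
    import org.slf4j.MarkerFactory;

    final class FatalMarkerExample {  // hypothetical class, for illustration only
      // Assumption: HBaseMarkers.FATAL is essentially a named SLF4J marker like this one.
      static final Marker FATAL = MarkerFactory.getMarker("FATAL");

      private static final Logger LOG = LoggerFactory.getLogger(FatalMarkerExample.class);

      static void reportLogicError(String path) {
        // Former LOG.fatal(...) calls become error-level events carrying the FATAL marker,
        // which a Log4j2/Logback configuration can match to route or escalate them.
        LOG.error(FATAL, "logic error - got null data {}", path);
      }
    }
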
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.java index 557a69c6b06..7b8815f625a 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.java @@ -30,8 +30,6 @@ import java.util.Collections; import java.util.List; import java.util.concurrent.atomic.AtomicReference; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.hbase.ByteBufferKeyOnlyKeyValue; @@ -43,6 +41,8 @@ import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue.KeyOnlyKeyValue; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.io.HeapSize; import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; import org.apache.hadoop.hbase.io.hfile.HFile.CachingBlockReader; @@ -68,7 +68,7 @@ import org.apache.hadoop.util.StringUtils; @InterfaceAudience.Private public class HFileBlockIndex { - private static final Log LOG = LogFactory.getLog(HFileBlockIndex.class); + private static final Logger LOG = LoggerFactory.getLogger(HFileBlockIndex.class); static final int DEFAULT_MAX_CHUNK_SIZE = 128 * 1024; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java index 5aea107cdce..639130db00f 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java @@ -46,8 +46,6 @@ import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; import org.apache.commons.cli.PosixParser; import org.apache.commons.lang3.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.fs.FileSystem; @@ -80,6 +78,8 @@ import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceStability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import com.codahale.metrics.ConsoleReporter; import com.codahale.metrics.Counter; @@ -99,7 +99,7 @@ import com.codahale.metrics.Timer; @InterfaceStability.Evolving public class HFilePrettyPrinter extends Configured implements Tool { - private static final Log LOG = LogFactory.getLog(HFilePrettyPrinter.class); + private static final Logger LOG = LoggerFactory.getLogger(HFilePrettyPrinter.class); private Options options = new Options(); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.java index 5021b4d1dea..22e38bf58e7 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.java @@ -26,8 +26,6 @@ import java.util.List; import java.util.Optional; import java.util.concurrent.atomic.AtomicInteger; -import 
org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; @@ -43,6 +41,8 @@ import org.apache.hadoop.hbase.SizeCachedKeyValue; import org.apache.hadoop.hbase.SizeCachedNoTagsKeyValue; import org.apache.hadoop.hbase.trace.TraceUtil; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.fs.HFileSystem; import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper; import org.apache.hadoop.hbase.io.compress.Compression; @@ -74,7 +74,7 @@ public class HFileReaderImpl implements HFile.Reader, Configurable { // one file. Ditto for all the HFileReader.ScannerV? implementations. I was running up against // the MaxInlineLevel limit because too many tiers involved reading from an hfile. Was also hard // to navigate the source code when so many classes participating in read. - private static final Log LOG = LogFactory.getLog(HFileReaderImpl.class); + private static final Logger LOG = LoggerFactory.getLogger(HFileReaderImpl.class); /** Data block index reader keeping the root data index in memory */ private HFileBlockIndex.CellBasedKeyBlockIndexReader dataBlockIndexReader; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.java index 8c631ebb31a..50d5ddc66e2 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.java @@ -26,8 +26,6 @@ import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; @@ -42,6 +40,8 @@ import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.CellComparatorImpl.MetaCellComparator; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.io.compress.Compression; import org.apache.hadoop.hbase.io.crypto.Encryption; import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; @@ -60,7 +60,7 @@ import org.apache.hadoop.io.Writable; */ @InterfaceAudience.Private public class HFileWriterImpl implements HFile.Writer { - private static final Log LOG = LogFactory.getLog(HFileWriterImpl.class); + private static final Logger LOG = LoggerFactory.getLogger(HFileWriterImpl.class); private static final long UNSET = -1; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java index 3733535df17..43238d930f7 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java @@ -34,10 +34,10 @@ import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.LongAdder; import java.util.concurrent.locks.ReentrantLock; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.yetus.audience.InterfaceAudience; +import 
org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.io.HeapSize; import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; import org.apache.hadoop.hbase.util.Bytes; @@ -99,7 +99,7 @@ import com.fasterxml.jackson.annotation.JsonIgnoreProperties; @JsonIgnoreProperties({"encodingCountsForTest"}) public class LruBlockCache implements ResizableBlockCache, HeapSize { - private static final Log LOG = LogFactory.getLog(LruBlockCache.class); + private static final Logger LOG = LoggerFactory.getLogger(LruBlockCache.class); /** * Percentage of total size that eviction will evict until; e.g. if set to .8, then we will keep diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/PrefetchExecutor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/PrefetchExecutor.java index 838fa418898..ce8d53338e3 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/PrefetchExecutor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/PrefetchExecutor.java @@ -29,17 +29,16 @@ import java.util.concurrent.ThreadFactory; import java.util.concurrent.TimeUnit; import java.util.regex.Pattern; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; - import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HConstants; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class PrefetchExecutor { - private static final Log LOG = LogFactory.getLog(PrefetchExecutor.class); + private static final Logger LOG = LoggerFactory.getLogger(PrefetchExecutor.class); /** Futures for tracking block prefetch activity */ private static final Map> prefetchFutures = new ConcurrentSkipListMap<>(); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.java index 40b64be18d9..8586967d86f 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.java @@ -31,9 +31,9 @@ import java.util.concurrent.atomic.LongAdder; import org.apache.hadoop.hbase.shaded.com.google.common.collect.MinMaxPriorityQueue; import org.apache.commons.collections4.map.LinkedMap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.io.hfile.BlockCacheKey; import org.apache.hadoop.hbase.io.hfile.CacheConfig; import org.apache.hadoop.hbase.io.hfile.bucket.BucketCache.BucketEntry; @@ -54,7 +54,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.primitives.Ints; @InterfaceAudience.Private @JsonIgnoreProperties({"indexStatistics", "freeSize", "usedSize"}) public final class BucketAllocator { - private static final Log LOG = LogFactory.getLog(BucketAllocator.class); + private static final Logger LOG = LoggerFactory.getLogger(BucketAllocator.class); @JsonIgnoreProperties({"completelyFree", "uninstantiated"}) public final static class Bucket { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java index 0ced7c1caaf..ee6eca4bf8b 100644 --- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java @@ -54,11 +54,12 @@ import java.util.concurrent.locks.ReentrantLock; import java.util.concurrent.locks.ReentrantReadWriteLock; import com.google.common.base.Preconditions; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; + import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.io.HeapSize; import org.apache.hadoop.hbase.io.hfile.BlockCache; import org.apache.hadoop.hbase.io.hfile.BlockCacheKey; @@ -102,7 +103,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFa */ @InterfaceAudience.Private public class BucketCache implements BlockCache, HeapSize { - private static final Log LOG = LogFactory.getLog(BucketCache.class); + private static final Logger LOG = LoggerFactory.getLogger(BucketCache.class); /** Priority buckets config */ static final String SINGLE_FACTOR_CONFIG_NAME = "hbase.bucketcache.single.factor"; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileIOEngine.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileIOEngine.java index ad1c394d7cb..9c19c8846cb 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileIOEngine.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileIOEngine.java @@ -28,9 +28,10 @@ import java.nio.channels.FileChannel; import java.util.Arrays; import com.google.common.annotations.VisibleForTesting; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; + import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.io.hfile.Cacheable; import org.apache.hadoop.hbase.io.hfile.CacheableDeserializer; import org.apache.hadoop.hbase.io.hfile.Cacheable.MemoryType; @@ -44,7 +45,7 @@ import org.apache.hadoop.util.StringUtils; */ @InterfaceAudience.Private public class FileIOEngine implements IOEngine { - private static final Log LOG = LogFactory.getLog(FileIOEngine.class); + private static final Logger LOG = LoggerFactory.getLogger(FileIOEngine.class); public static final String FILE_DELIMITER = ","; private final String[] filePaths; private final FileChannel[] fileChannels; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileMmapEngine.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileMmapEngine.java index 4fe39d38ae8..e2f019114b6 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileMmapEngine.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileMmapEngine.java @@ -23,9 +23,9 @@ import java.io.RandomAccessFile; import java.nio.ByteBuffer; import java.nio.channels.FileChannel; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.io.hfile.Cacheable; import org.apache.hadoop.hbase.io.hfile.Cacheable.MemoryType; import org.apache.hadoop.hbase.io.hfile.CacheableDeserializer; @@ -41,7 +41,7 @@ import org.apache.hadoop.util.StringUtils; */ 
@InterfaceAudience.Private public class FileMmapEngine implements IOEngine { - static final Log LOG = LogFactory.getLog(FileMmapEngine.class); + static final Logger LOG = LoggerFactory.getLogger(FileMmapEngine.class); private final String path; private long size; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/util/MemorySizeUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/util/MemorySizeUtil.java index 1f2025278d7..471eb469b7e 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/util/MemorySizeUtil.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/util/MemorySizeUtil.java @@ -21,11 +21,11 @@ import java.lang.management.ManagementFactory; import java.lang.management.MemoryType; import java.lang.management.MemoryUsage; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HConstants; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.regionserver.MemStoreLAB; import org.apache.hadoop.hbase.util.Pair; @@ -51,7 +51,7 @@ public class MemorySizeUtil { // Default lower water mark limit is 95% size of memstore size. public static final float DEFAULT_MEMSTORE_SIZE_LOWER_LIMIT = 0.95f; - private static final Log LOG = LogFactory.getLog(MemorySizeUtil.class); + private static final Logger LOG = LoggerFactory.getLogger(MemorySizeUtil.class); // a constant to convert a fraction to a percentage private static final int CONVERT_TO_PERCENTAGE = 100; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/FifoRpcScheduler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/FifoRpcScheduler.java index 679f237721f..d36b468a9e1 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/FifoRpcScheduler.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/FifoRpcScheduler.java @@ -17,18 +17,19 @@ */ package org.apache.hadoop.hbase.ipc; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.DaemonThreadFactory; -import org.apache.hadoop.hbase.shaded.io.netty.util.internal.StringUtil; - import java.io.IOException; +import java.util.HashMap; import java.util.concurrent.ArrayBlockingQueue; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; -import java.util.HashMap; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.DaemonThreadFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.apache.hadoop.hbase.shaded.io.netty.util.internal.StringUtil; /** * A very simple {@code }RpcScheduler} that serves incoming requests in order. @@ -36,7 +37,7 @@ import java.util.HashMap; * This can be used for HMaster, where no prioritization is needed. 
*/ public class FifoRpcScheduler extends RpcScheduler { - private static final Log LOG = LogFactory.getLog(FifoRpcScheduler.class); + private static final Logger LOG = LoggerFactory.getLogger(FifoRpcScheduler.class); private final int handlerCount; private final int maxQueueLength; private final AtomicInteger queueSize = new AtomicInteger(0); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcServer.java index 91c468f197a..7ac0d14e847 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcServer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcServer.java @@ -38,13 +38,13 @@ import java.net.InetSocketAddress; import java.util.List; import java.util.concurrent.CountDownLatch; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.CellScanner; import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.Server; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.monitoring.MonitoredRPCHandler; import org.apache.hadoop.hbase.regionserver.HRegionServer; import org.apache.hadoop.hbase.security.HBasePolicyProvider; @@ -63,7 +63,7 @@ import org.apache.hadoop.security.authorize.ServiceAuthorizationManager; @InterfaceAudience.LimitedPrivate({HBaseInterfaceAudience.CONFIG}) public class NettyRpcServer extends RpcServer { - public static final Log LOG = LogFactory.getLog(NettyRpcServer.class); + public static final Logger LOG = LoggerFactory.getLogger(NettyRpcServer.class); private final InetSocketAddress bindAddress; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RWQueueRpcExecutor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RWQueueRpcExecutor.java index 86537c0b011..a8a7fe0da46 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RWQueueRpcExecutor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RWQueueRpcExecutor.java @@ -22,13 +22,13 @@ package org.apache.hadoop.hbase.ipc; import java.util.concurrent.BlockingQueue; import java.util.concurrent.atomic.AtomicInteger; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Abortable; import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceStability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action; import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest; import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest; @@ -47,7 +47,7 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message; @InterfaceAudience.LimitedPrivate({HBaseInterfaceAudience.COPROC, HBaseInterfaceAudience.PHOENIX}) @InterfaceStability.Evolving public class RWQueueRpcExecutor extends RpcExecutor { - private static final Log LOG = LogFactory.getLog(RWQueueRpcExecutor.class); + private static final Logger LOG = LoggerFactory.getLogger(RWQueueRpcExecutor.class); public static final String CALL_QUEUE_READ_SHARE_CONF_KEY = "hbase.ipc.server.callqueue.read.ratio"; diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcExecutor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcExecutor.java index 445a460c75a..d79416db0f7 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcExecutor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcExecutor.java @@ -30,13 +30,13 @@ import java.util.concurrent.atomic.LongAdder; import java.util.Map; import java.util.HashMap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Abortable; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.shaded.io.netty.util.internal.StringUtil; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.monitoring.MonitoredRPCHandler; import org.apache.hadoop.hbase.util.BoundedPriorityBlockingQueue; import org.apache.hadoop.hbase.util.ReflectionUtils; @@ -51,7 +51,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.base.Strings; */ @InterfaceAudience.Private public abstract class RpcExecutor { - private static final Log LOG = LogFactory.getLog(RpcExecutor.class); + private static final Logger LOG = LoggerFactory.getLogger(RpcExecutor.class); protected static final int DEFAULT_CALL_QUEUE_SIZE_HARD_LIMIT = 250; public static final String CALL_QUEUE_HANDLER_FACTOR_CONF_KEY = "hbase.ipc.server.callqueue.handler.factor"; @@ -151,7 +151,7 @@ public abstract class RpcExecutor { } protected int computeNumCallQueues(final int handlerCount, final float callQueuesHandlersFactor) { - return Math.max(1, (int) Math.round(handlerCount * callQueuesHandlersFactor)); + return Math.max(1, Math.round(handlerCount * callQueuesHandlersFactor)); } public Map getCallQueueCountsSummary() { @@ -204,8 +204,7 @@ public abstract class RpcExecutor { queueInitArgs[0] = Math.max((int) queueInitArgs[0], DEFAULT_CALL_QUEUE_SIZE_HARD_LIMIT); } for (int i = 0; i < numQueues; ++i) { - queues - .add((BlockingQueue) ReflectionUtils.newInstance(queueClass, queueInitArgs)); + queues.add(ReflectionUtils.newInstance(queueClass, queueInitArgs)); } } @@ -308,7 +307,7 @@ public abstract class RpcExecutor { } } } catch (Exception e) { - LOG.warn(e); + LOG.warn(e.toString(), e); throw e; } finally { if (interrupted) { @@ -385,6 +384,7 @@ public abstract class RpcExecutor { this.queueSize = queueSize; } + @Override public int getNextQueue() { return ThreadLocalRandom.current().nextInt(queueSize); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java index 43af98836a1..2a00c3d15fc 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java @@ -35,9 +35,6 @@ import java.util.Map; import java.util.Optional; import java.util.concurrent.atomic.LongAdder; -import com.fasterxml.jackson.databind.ObjectMapper; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.CallQueueTooBigException; import org.apache.hadoop.hbase.CellScanner; @@ -66,6 +63,8 @@ import org.apache.hadoop.security.authorize.ServiceAuthorizationManager; import org.apache.hadoop.security.token.SecretManager; import org.apache.hadoop.security.token.TokenIdentifier; import 
org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingService; @@ -77,6 +76,8 @@ import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos; import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader; +import com.fasterxml.jackson.databind.ObjectMapper; + /** * An RPC server that hosts protobuf described Services. * @@ -85,7 +86,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHea public abstract class RpcServer implements RpcServerInterface, ConfigurationObserver { // LOG is being used in CallRunner and the log level is being changed in tests - public static final Log LOG = LogFactory.getLog(RpcServer.class); + public static final Logger LOG = LoggerFactory.getLogger(RpcServer.class); protected static final CallQueueTooBigException CALL_QUEUE_TOO_BIG_EXCEPTION = new CallQueueTooBigException(); @@ -109,7 +110,7 @@ public abstract class RpcServer implements RpcServerInterface, protected static final String AUTH_FAILED_FOR = "Auth failed for "; protected static final String AUTH_SUCCESSFUL_FOR = "Auth successful for "; - protected static final Log AUDITLOG = LogFactory.getLog("SecurityLogger." + protected static final Logger AUDITLOG = LoggerFactory.getLogger("SecurityLogger." + Server.class.getName()); protected SecretManager secretManager; protected final Map saslProps; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServerFactory.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServerFactory.java index b1b047d711a..fcd11f5bc7f 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServerFactory.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServerFactory.java @@ -21,11 +21,11 @@ import java.io.IOException; import java.net.InetSocketAddress; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Server; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.ipc.RpcServer.BlockingServiceAndInterface; import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.ServiceDescriptor; import org.apache.hadoop.hbase.util.ReflectionUtils; @@ -33,7 +33,7 @@ import org.apache.hadoop.hbase.util.ReflectionUtils; @InterfaceAudience.Private public class RpcServerFactory { - public static final Log LOG = LogFactory.getLog(RpcServerFactory.class); + public static final Logger LOG = LoggerFactory.getLogger(RpcServerFactory.class); public static final String CUSTOM_RPC_SERVER_IMPL_CONF_KEY = "hbase.rpc.server.impl"; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ActiveMasterManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ActiveMasterManager.java index ce9f290c56d..62073db8633 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ActiveMasterManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ActiveMasterManager.java @@ -21,8 +21,6 @@ package org.apache.hadoop.hbase.master; import java.io.IOException; import java.util.concurrent.atomic.AtomicBoolean; -import org.apache.commons.logging.Log; 
-import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.zookeeper.MasterAddressTracker; import org.apache.hadoop.hbase.zookeeper.ZKUtil; import org.apache.hadoop.hbase.zookeeper.ZKWatcher; @@ -36,6 +34,8 @@ import org.apache.hadoop.hbase.monitoring.MonitoredTask; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.zookeeper.ZKListener; import org.apache.zookeeper.KeeperException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Handles everything on master-side related to master election. @@ -52,7 +52,7 @@ import org.apache.zookeeper.KeeperException; */ @InterfaceAudience.Private public class ActiveMasterManager extends ZKListener { - private static final Log LOG = LogFactory.getLog(ActiveMasterManager.class); + private static final Logger LOG = LoggerFactory.getLogger(ActiveMasterManager.class); final AtomicBoolean clusterHasActiveMaster = new AtomicBoolean(false); final AtomicBoolean clusterShutDown = new AtomicBoolean(false); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/AssignmentVerificationReport.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/AssignmentVerificationReport.java index ccbfadc38bb..5c084bf4ebc 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/AssignmentVerificationReport.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/AssignmentVerificationReport.java @@ -26,14 +26,14 @@ import java.util.List; import java.util.Map; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.RegionInfo; import org.apache.hadoop.hbase.favored.FavoredNodeAssignmentHelper; import org.apache.hadoop.hbase.favored.FavoredNodesPlan; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Helper class that is used by {@link RegionPlacementMaintainer} to print * information for favored nodes @@ -41,7 +41,7 @@ import org.apache.yetus.audience.InterfaceAudience; */ @InterfaceAudience.Private public class AssignmentVerificationReport { - private static final Log LOG = LogFactory.getLog( + private static final Logger LOG = LoggerFactory.getLogger( AssignmentVerificationReport.class.getName()); private TableName tableName = null; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/CatalogJanitor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/CatalogJanitor.java index d3ba231b15f..23912d67c8d 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/CatalogJanitor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/CatalogJanitor.java @@ -27,8 +27,6 @@ import java.util.TreeMap; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HConstants; @@ -53,6 +51,8 @@ import org.apache.hadoop.hbase.util.PairOfSameType; import org.apache.hadoop.hbase.util.Threads; import org.apache.hadoop.hbase.util.Triple; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A janitor for the catalog tables. 
Scans the hbase:meta catalog @@ -60,7 +60,7 @@ import org.apache.yetus.audience.InterfaceAudience; */ @InterfaceAudience.Private public class CatalogJanitor extends ScheduledChore { - private static final Log LOG = LogFactory.getLog(CatalogJanitor.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(CatalogJanitor.class.getName()); private final AtomicBoolean alreadyRunning = new AtomicBoolean(false); private final AtomicBoolean enabled = new AtomicBoolean(true); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/DeadServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/DeadServer.java index df5444ad4fa..db04c606a95 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/DeadServer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/DeadServer.java @@ -18,9 +18,9 @@ */ package org.apache.hadoop.hbase.master; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.Pair; @@ -42,7 +42,7 @@ import java.util.Set; */ @InterfaceAudience.Private public class DeadServer { - private static final Log LOG = LogFactory.getLog(DeadServer.class); + private static final Logger LOG = LoggerFactory.getLogger(DeadServer.class); /** * Set of known dead servers. On znode expiration, servers are added here. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/DrainingServerTracker.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/DrainingServerTracker.java index 81a8b55385d..a9e579629c1 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/DrainingServerTracker.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/DrainingServerTracker.java @@ -22,8 +22,6 @@ import java.util.List; import java.util.NavigableSet; import java.util.TreeSet; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.zookeeper.ZKListener; import org.apache.hadoop.hbase.zookeeper.ZKUtil; import org.apache.hadoop.hbase.zookeeper.ZKWatcher; @@ -31,6 +29,8 @@ import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.Abortable; import org.apache.hadoop.hbase.ServerName; import org.apache.zookeeper.KeeperException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Tracks the list of draining region servers via ZK. 
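The hunks above repeat the core substitution of this patch: the commons-logging Log/LogFactory pair is replaced by SLF4J's Logger/LoggerFactory, and call sites that passed a bare Throwable (which commons-logging accepted as an Object message) are rewritten to supply an explicit String message so the stack trace is still recorded. A minimal sketch of that pattern follows; the class name is illustrative and not part of the patch.

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LoggingMigrationSketch {
  // Declaration swap: LogFactory.getLog(...) becomes LoggerFactory.getLogger(...).
  private static final Logger LOG = LoggerFactory.getLogger(LoggingMigrationSketch.class);

  void runTask(Runnable task) {
    try {
      task.run();
    } catch (RuntimeException e) {
      // commons-logging allowed LOG.warn(e); SLF4J's warn(String, Throwable)
      // needs a message, hence the warn(e.toString(), e) form used in RpcExecutor above.
      LOG.warn(e.toString(), e);
    }
  }
}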
@@ -51,7 +51,7 @@ import org.apache.zookeeper.KeeperException; */ @InterfaceAudience.Private public class DrainingServerTracker extends ZKListener { - private static final Log LOG = LogFactory.getLog(DrainingServerTracker.class); + private static final Logger LOG = LoggerFactory.getLogger(DrainingServerTracker.class); private ServerManager serverManager; private final NavigableSet drainingServers = new TreeSet<>(); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ExpiredMobFileCleanerChore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ExpiredMobFileCleanerChore.java index b1d83791c61..d37a80a1c3d 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ExpiredMobFileCleanerChore.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ExpiredMobFileCleanerChore.java @@ -21,11 +21,11 @@ package org.apache.hadoop.hbase.master; import java.util.Map; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.ScheduledChore; import org.apache.hadoop.hbase.TableDescriptors; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor; import org.apache.hadoop.hbase.client.TableDescriptor; import org.apache.hadoop.hbase.master.locking.LockManager; @@ -41,7 +41,7 @@ import org.apache.hadoop.hbase.procedure2.LockType; @InterfaceAudience.Private public class ExpiredMobFileCleanerChore extends ScheduledChore { - private static final Log LOG = LogFactory.getLog(ExpiredMobFileCleanerChore.class); + private static final Logger LOG = LoggerFactory.getLogger(ExpiredMobFileCleanerChore.class); private final HMaster master; private ExpiredMobFileCleaner cleaner; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java index 262dfa23751..e31db820cbd 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java @@ -39,6 +39,7 @@ import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Map.Entry; +import java.util.Objects; import java.util.Set; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutionException; @@ -50,8 +51,6 @@ import java.util.concurrent.atomic.AtomicReference; import java.util.function.Function; import java.util.regex.Pattern; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.ClusterStatus; @@ -91,6 +90,7 @@ import org.apache.hadoop.hbase.http.InfoServer; import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils; import org.apache.hadoop.hbase.ipc.RpcServer; import org.apache.hadoop.hbase.ipc.ServerNotRunningYetException; +import org.apache.hadoop.hbase.log.HBaseMarkers; import org.apache.hadoop.hbase.master.MasterRpcServices.BalanceSwitchMode; import org.apache.hadoop.hbase.master.assignment.AssignmentManager; import org.apache.hadoop.hbase.master.assignment.MergeTableRegionsProcedure; @@ -188,7 +188,8 @@ import org.eclipse.jetty.server.Server; import org.eclipse.jetty.server.ServerConnector; import org.eclipse.jetty.servlet.ServletHolder; import org.eclipse.jetty.webapp.WebAppContext; - +import org.slf4j.Logger; +import 
org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps; @@ -221,7 +222,7 @@ import com.google.protobuf.Service; @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS) @SuppressWarnings("deprecation") public class HMaster extends HRegionServer implements MasterServices { - private static final Log LOG = LogFactory.getLog(HMaster.class.getName()); + private static Logger LOG = LoggerFactory.getLogger(HMaster.class.getName()); /** * Protection against zombie master. Started once Master accepts active responsibility and @@ -607,6 +608,7 @@ public class HMaster extends HRegionServer implements MasterServices { return connector.getLocalPort(); } + @Override protected Function getMetaTableObserver() { return builder -> builder.setRegionReplication(conf.getInt(HConstants.META_REPLICAS_NUM, HConstants.DEFAULT_META_REPLICA_NUM)); } @@ -818,7 +820,7 @@ public class HMaster extends HRegionServer implements MasterServices { // Wait for region servers to report in String statusStr = "Wait for region servers to report in"; status.setStatus(statusStr); - LOG.info(status); + LOG.info(Objects.toString(status)); waitForRegionServers(status); if (this.balancer instanceof FavoredNodesPromoter) { @@ -1528,6 +1530,7 @@ public class HMaster extends HRegionServer implements MasterServices { /** * @return Client info for use as prefix on an audit log string; who did an action */ + @Override public String getClientIdAuditPrefix() { return "Client=" + RpcServer.getRequestUserName().orElse(null) + "/" + RpcServer.getRemoteAddress().orElse(null); @@ -2017,7 +2020,7 @@ public class HMaster extends HRegionServer implements MasterServices { } } catch (Throwable t) { status.setStatus("Failed to become active: " + t.getMessage()); - LOG.fatal("Failed to become active master", t); + LOG.error(HBaseMarkers.FATAL, "Failed to become active master", t); // HBASE-5680: Likely hadoop23 vs hadoop 20.x/1.x incompatibility if (t instanceof NoClassDefFoundError && t.getMessage() @@ -2606,13 +2609,13 @@ public class HMaster extends HRegionServer implements MasterServices { } if (cpHost != null) { // HBASE-4014: dump a list of loaded coprocessors. 
- LOG.fatal("Master server abort: loaded coprocessors are: " + + LOG.error(HBaseMarkers.FATAL, "Master server abort: loaded coprocessors are: " + getLoadedCoprocessors()); } if (t != null) { - LOG.fatal(msg, t); + LOG.error(HBaseMarkers.FATAL, msg, t); } else { - LOG.fatal(msg); + LOG.error(HBaseMarkers.FATAL, msg); } try { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMasterCommandLine.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMasterCommandLine.java index 093412a47f4..3ec70d3b037 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMasterCommandLine.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMasterCommandLine.java @@ -26,8 +26,6 @@ import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.GnuParser; import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.LocalHBaseCluster; @@ -46,10 +44,12 @@ import org.apache.hadoop.hbase.zookeeper.MiniZooKeeperCluster; import org.apache.hadoop.hbase.zookeeper.ZKUtil; import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem; import org.apache.zookeeper.KeeperException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @InterfaceAudience.Private public class HMasterCommandLine extends ServerCommandLine { - private static final Log LOG = LogFactory.getLog(HMasterCommandLine.class); + private static final Logger LOG = LoggerFactory.getLogger(HMasterCommandLine.class); private static final String USAGE = "Usage: Master [opts] start|stop|clear\n" + diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterCoprocessorHost.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterCoprocessorHost.java index bc262290fc5..ee7bcd6e6a3 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterCoprocessorHost.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterCoprocessorHost.java @@ -23,8 +23,7 @@ import java.util.List; import java.util.Set; import com.google.protobuf.Service; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; + import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.ClusterStatus; import org.apache.hadoop.hbase.MetaMutationAnnotation; @@ -61,6 +60,8 @@ import org.apache.hadoop.hbase.quotas.GlobalQuotaSettings; import org.apache.hadoop.hbase.replication.ReplicationPeerConfig; import org.apache.hadoop.hbase.security.User; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Provides the coprocessor framework and environment for master oriented @@ -71,7 +72,7 @@ import org.apache.yetus.audience.InterfaceAudience; public class MasterCoprocessorHost extends CoprocessorHost { - private static final Log LOG = LogFactory.getLog(MasterCoprocessorHost.class); + private static final Logger LOG = LoggerFactory.getLogger(MasterCoprocessorHost.class); /** * Coprocessor environment extension providing access to master related diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java index 27987f6bce3..8c2c9fdcdc6 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java +++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java @@ -20,8 +20,6 @@ package org.apache.hadoop.hbase.master; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -39,6 +37,7 @@ import org.apache.hadoop.hbase.client.TableDescriptor; import org.apache.hadoop.hbase.client.TableDescriptorBuilder; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.fs.HFileSystem; +import org.apache.hadoop.hbase.log.HBaseMarkers; import org.apache.hadoop.hbase.mob.MobConstants; import org.apache.hadoop.hbase.procedure2.store.wal.WALProcedureStore; import org.apache.hadoop.hbase.regionserver.HRegion; @@ -47,6 +46,8 @@ import org.apache.hadoop.hbase.util.FSTableDescriptors; import org.apache.hadoop.hbase.util.FSUtils; import org.apache.hadoop.ipc.RemoteException; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This class abstracts a bunch of operations the HMaster needs to interact with @@ -55,7 +56,7 @@ import org.apache.yetus.audience.InterfaceAudience; */ @InterfaceAudience.Private public class MasterFileSystem { - private static final Log LOG = LogFactory.getLog(MasterFileSystem.class); + private static final Logger LOG = LoggerFactory.getLogger(MasterFileSystem.class); /** Parameter name for HBase instance root directory permission*/ public static final String HBASE_DIR_PERMS = "hbase.rootdir.perms"; @@ -264,12 +265,13 @@ public class MasterFileSystem { HConstants.DEFAULT_VERSION_FILE_WRITE_ATTEMPTS)); } } catch (DeserializationException de) { - LOG.fatal("Please fix invalid configuration for " + HConstants.HBASE_DIR, de); + LOG.error(HBaseMarkers.FATAL, "Please fix invalid configuration for " + + HConstants.HBASE_DIR, de); IOException ioe = new IOException(); ioe.initCause(de); throw ioe; } catch (IllegalArgumentException iae) { - LOG.fatal("Please fix invalid configuration for " + LOG.error(HBaseMarkers.FATAL, "Please fix invalid configuration for " + HConstants.HBASE_DIR + " " + rd.toString(), iae); throw iae; } @@ -444,7 +446,7 @@ public class MasterFileSystem { public void stop() { } - public void logFileSystemState(Log log) throws IOException { + public void logFileSystemState(Logger log) throws IOException { FSUtils.logFileSystemState(fs, rootdir, log); } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterMetaBootstrap.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterMetaBootstrap.java index 7abf02cf55d..78e7e6e9645 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterMetaBootstrap.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterMetaBootstrap.java @@ -22,8 +22,6 @@ import java.io.IOException; import java.util.List; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.TableName; @@ -38,13 +36,15 @@ import org.apache.hadoop.hbase.zookeeper.ZKUtil; import org.apache.hadoop.hbase.zookeeper.ZKWatcher; import org.apache.yetus.audience.InterfaceAudience; import org.apache.zookeeper.KeeperException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Used by the HMaster on startup to split meta logs and 
assign the meta table. */ @InterfaceAudience.Private public class MasterMetaBootstrap { - private static final Log LOG = LogFactory.getLog(MasterMetaBootstrap.class); + private static final Logger LOG = LoggerFactory.getLogger(MasterMetaBootstrap.class); private final MonitoredTask status; private final HMaster master; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterMobCompactionThread.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterMobCompactionThread.java index 8677975396d..9d6da0c1ffb 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterMobCompactionThread.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterMobCompactionThread.java @@ -27,12 +27,12 @@ import java.util.concurrent.ThreadFactory; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.hbase.TableName; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor; import org.apache.hadoop.hbase.master.locking.LockManager; import org.apache.hadoop.hbase.mob.MobUtils; @@ -44,7 +44,7 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; */ @InterfaceAudience.Private public class MasterMobCompactionThread { - static final Log LOG = LogFactory.getLog(MasterMobCompactionThread.class); + static final Logger LOG = LoggerFactory.getLogger(MasterMobCompactionThread.class); private final HMaster master; private final Configuration conf; private final ExecutorService mobCompactorPool; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java index 6044d021c1a..4a4bbe1718e 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java @@ -31,8 +31,6 @@ import java.util.Map.Entry; import java.util.Set; import java.util.stream.Collectors; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.HConstants; @@ -94,6 +92,9 @@ import org.apache.hadoop.hbase.util.ForeignExceptionUtil; import org.apache.hadoop.hbase.util.Pair; import org.apache.yetus.audience.InterfaceAudience; import org.apache.zookeeper.KeeperException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController; import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations; @@ -290,7 +291,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.Snapshot public class MasterRpcServices extends RSRpcServices implements MasterService.BlockingInterface, RegionServerStatusService.BlockingInterface, LockService.BlockingInterface { - private static final Log LOG = LogFactory.getLog(MasterRpcServices.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(MasterRpcServices.class.getName()); private final HMaster master; diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterWalManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterWalManager.java index 891ea9b49ca..52ba0991aa9 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterWalManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterWalManager.java @@ -27,8 +27,6 @@ import java.util.Set; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -41,7 +39,8 @@ import org.apache.hadoop.hbase.util.FSUtils; import org.apache.hadoop.hbase.wal.AbstractFSWALProvider; import org.apache.hadoop.hbase.wal.WALSplitter; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** @@ -50,7 +49,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe */ @InterfaceAudience.Private public class MasterWalManager { - private static final Log LOG = LogFactory.getLog(MasterWalManager.class); + private static final Logger LOG = LoggerFactory.getLogger(MasterWalManager.class); final static PathFilter META_FILTER = new PathFilter() { @Override diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MetricsMaster.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MetricsMaster.java index c2c37bdd293..83a69880064 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MetricsMaster.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MetricsMaster.java @@ -18,10 +18,10 @@ package org.apache.hadoop.hbase.master; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceStability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.CompatibilitySingletonFactory; import org.apache.hadoop.hbase.metrics.Counter; import org.apache.hadoop.hbase.metrics.Histogram; @@ -38,7 +38,7 @@ import org.apache.hadoop.hbase.procedure2.ProcedureMetrics; @InterfaceStability.Evolving @InterfaceAudience.Private public class MetricsMaster { - private static final Log LOG = LogFactory.getLog(MetricsMaster.class); + private static final Logger LOG = LoggerFactory.getLogger(MetricsMaster.class); private MetricsMasterSource masterSource; private MetricsMasterProcSource masterProcSource; private MetricsMasterQuotaSource masterQuotaSource; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MobCompactionChore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MobCompactionChore.java index 2e0e44c4703..8a7c4e1bf8e 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MobCompactionChore.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MobCompactionChore.java @@ -22,11 +22,11 @@ import java.util.Map; import java.util.concurrent.ExecutorService; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.ScheduledChore; import org.apache.hadoop.hbase.TableDescriptors; import org.apache.yetus.audience.InterfaceAudience; +import 
org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor; import org.apache.hadoop.hbase.client.TableDescriptor; import org.apache.hadoop.hbase.client.TableState; @@ -40,7 +40,7 @@ import org.apache.hadoop.hbase.procedure2.LockType; @InterfaceAudience.Private public class MobCompactionChore extends ScheduledChore { - private static final Log LOG = LogFactory.getLog(MobCompactionChore.class); + private static final Logger LOG = LoggerFactory.getLogger(MobCompactionChore.class); private HMaster master; private ExecutorService pool; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/RackManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/RackManager.java index 58acf83c9fc..447c6a6d8ef 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/RackManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/RackManager.java @@ -22,9 +22,9 @@ import java.util.Arrays; import java.util.Collections; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.util.ReflectionUtils; @@ -37,7 +37,7 @@ import org.apache.hadoop.net.ScriptBasedMapping; */ @InterfaceAudience.Private public class RackManager { - private static final Log LOG = LogFactory.getLog(RackManager.class); + private static final Logger LOG = LoggerFactory.getLogger(RackManager.class); public static final String UNKNOWN_RACK = "Unknown Rack"; private DNSToSwitchMapping switchMapping; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/RegionPlacementMaintainer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/RegionPlacementMaintainer.java index afd402b1902..370f1f26f7b 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/RegionPlacementMaintainer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/RegionPlacementMaintainer.java @@ -38,8 +38,6 @@ import org.apache.commons.cli.HelpFormatter; import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; import org.apache.commons.lang3.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.hbase.ClusterStatus.Option; @@ -57,10 +55,9 @@ import org.apache.hadoop.hbase.favored.FavoredNodesPlan; import org.apache.hadoop.hbase.util.FSUtils; import org.apache.hadoop.hbase.util.MunkresAssignment; import org.apache.hadoop.hbase.util.Pair; -import org.apache.log4j.Level; -import org.apache.log4j.Logger; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter; import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface; @@ -74,7 +71,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavor @InterfaceAudience.Private // TODO: Remove? Unused. Partially implemented only. 
public class RegionPlacementMaintainer { - private static final Log LOG = LogFactory.getLog(RegionPlacementMaintainer.class + private static final Logger LOG = LoggerFactory.getLogger(RegionPlacementMaintainer.class .getName()); //The cost of a placement that should never be assigned. private static final float MAX_COST = Float.POSITIVE_INFINITY; @@ -980,12 +977,6 @@ public class RegionPlacementMaintainer { opt.addOption("ld", "locality-dispersion", false, "print locality and dispersion " + "information for current plan"); try { - // Set the log4j - Logger.getLogger("org.apache.zookeeper").setLevel(Level.ERROR); - Logger.getLogger("org.apache.hadoop.hbase").setLevel(Level.ERROR); - Logger.getLogger("org.apache.hadoop.hbase.master.RegionPlacementMaintainer") - .setLevel(Level.INFO); - CommandLine cmd = new GnuParser().parse(opt, args); Configuration conf = HBaseConfiguration.create(); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/RegionServerTracker.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/RegionServerTracker.java index 0e9351d226c..2f2d536ab5f 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/RegionServerTracker.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/RegionServerTracker.java @@ -25,8 +25,6 @@ import java.util.List; import java.util.NavigableMap; import java.util.TreeMap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.zookeeper.ZKListener; import org.apache.hadoop.hbase.zookeeper.ZKUtil; @@ -36,6 +34,8 @@ import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionServerInfo; import org.apache.yetus.audience.InterfaceAudience; import org.apache.zookeeper.KeeperException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Tracks the online region servers via ZK. 
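SLF4J has no FATAL level, so the HMaster and MasterFileSystem hunks above (and SplitLogManager further down) route former LOG.fatal(...) calls through LOG.error(...) tagged with the project's HBaseMarkers.FATAL marker; RegionPlacementMaintainer likewise stops forcing log4j levels in code, leaving level configuration to whichever backend is bound. A small sketch of the marker pattern; the MarkerFactory line is only an assumption about how such a constant can be produced, not a copy of the HBaseMarkers class, which is not shown in this patch.

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.Marker;
import org.slf4j.MarkerFactory;

public class FatalMarkerSketch {
  // Stand-in for HBaseMarkers.FATAL (assumed shape, not the project's class).
  private static final Marker FATAL = MarkerFactory.getMarker("FATAL");
  private static final Logger LOG = LoggerFactory.getLogger(FatalMarkerSketch.class);

  void abortWith(String msg, Throwable t) {
    // Before the patch: LOG.fatal(msg, t); after: an ERROR record tagged FATAL.
    LOG.error(FATAL, msg, t);
  }
}

Backends that understand markers can filter or highlight these entries; backends that do not simply see an ERROR record.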
@@ -49,7 +49,7 @@ import org.apache.zookeeper.KeeperException; */ @InterfaceAudience.Private public class RegionServerTracker extends ZKListener { - private static final Log LOG = LogFactory.getLog(RegionServerTracker.class); + private static final Logger LOG = LoggerFactory.getLogger(RegionServerTracker.class); private NavigableMap regionServers = new TreeMap<>(); private ServerManager serverManager; private MasterServices server; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ServerManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ServerManager.java index b86315be7ac..923a0a7ae7c 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ServerManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ServerManager.java @@ -38,8 +38,6 @@ import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.Predicate; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.ClockOutOfSyncException; import org.apache.hadoop.hbase.HConstants; @@ -61,7 +59,8 @@ import org.apache.hadoop.hbase.zookeeper.ZKUtil; import org.apache.hadoop.hbase.zookeeper.ZKWatcher; import org.apache.yetus.audience.InterfaceAudience; import org.apache.zookeeper.KeeperException; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; @@ -109,7 +108,7 @@ public class ServerManager { public static final String WAIT_ON_REGIONSERVERS_INTERVAL = "hbase.master.wait.on.regionservers.interval"; - private static final Log LOG = LogFactory.getLog(ServerManager.class); + private static final Logger LOG = LoggerFactory.getLogger(ServerManager.class); // Set if we are to shutdown the cluster. 
private AtomicBoolean clusterShutdown = new AtomicBoolean(false); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SnapshotOfRegionAssignmentFromMeta.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SnapshotOfRegionAssignmentFromMeta.java index 5d889a4f73c..90315775840 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SnapshotOfRegionAssignmentFromMeta.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SnapshotOfRegionAssignmentFromMeta.java @@ -32,8 +32,6 @@ import java.util.Map; import java.util.Set; import java.util.TreeMap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HRegionLocation; import org.apache.hadoop.hbase.MetaTableAccessor; @@ -47,6 +45,8 @@ import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.favored.FavoredNodeAssignmentHelper; import org.apache.hadoop.hbase.favored.FavoredNodesPlan; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Used internally for reading meta and constructing datastructures that are @@ -56,7 +56,7 @@ import org.apache.yetus.audience.InterfaceAudience; */ @InterfaceAudience.Private public class SnapshotOfRegionAssignmentFromMeta { - private static final Log LOG = LogFactory.getLog(SnapshotOfRegionAssignmentFromMeta.class + private static final Logger LOG = LoggerFactory.getLogger(SnapshotOfRegionAssignmentFromMeta.class .getName()); private final Connection connection; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SplitLogManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SplitLogManager.java index 8ab087d814d..97fa7c8f5f1 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SplitLogManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SplitLogManager.java @@ -36,8 +36,6 @@ import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.atomic.AtomicInteger; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -50,13 +48,15 @@ import org.apache.hadoop.hbase.SplitLogCounters; import org.apache.hadoop.hbase.Stoppable; import org.apache.hadoop.hbase.coordination.SplitLogManagerCoordination; import org.apache.hadoop.hbase.coordination.SplitLogManagerCoordination.SplitLogManagerDetails; +import org.apache.hadoop.hbase.log.HBaseMarkers; import org.apache.hadoop.hbase.monitoring.MonitoredTask; import org.apache.hadoop.hbase.monitoring.TaskMonitor; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.FSUtils; import org.apache.hadoop.hbase.wal.AbstractFSWALProvider; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** @@ -90,7 +90,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe */ @InterfaceAudience.Private public class SplitLogManager { - private static final Log LOG = LogFactory.getLog(SplitLogManager.class); + private static final Logger LOG = LoggerFactory.getLogger(SplitLogManager.class); private final MasterServices server; @@ -415,7 +415,7 @@ public 
class SplitLogManager { batch.installed++; return null; } - LOG.fatal("Logic error. Deleted task still present in tasks map"); + LOG.error(HBaseMarkers.FATAL, "Logic error. Deleted task still present in tasks map"); assert false : "Deleted task still present in tasks map"; return t; } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableNamespaceManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableNamespaceManager.java index 174272e3f44..dbf6d394b60 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableNamespaceManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableNamespaceManager.java @@ -23,8 +23,6 @@ import java.io.InterruptedIOException; import java.util.NavigableSet; import org.apache.commons.lang3.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Cell.DataType; import org.apache.hadoop.hbase.CellBuilderFactory; @@ -55,6 +53,8 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.Threads; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This is a helper class used internally to manage the namespace metadata that is stored in @@ -69,7 +69,7 @@ import org.apache.yetus.audience.InterfaceAudience; justification="TODO: synchronize access on nsTable but it is done in tiers above and this " + "class is going away/shrinking") public class TableNamespaceManager { - private static final Log LOG = LogFactory.getLog(TableNamespaceManager.class); + private static final Logger LOG = LoggerFactory.getLogger(TableNamespaceManager.class); private Configuration conf; private MasterServices masterServices; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableStateManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableStateManager.java index 330b752b5c9..ad8908ac969 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableStateManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableStateManager.java @@ -28,13 +28,14 @@ import org.apache.hadoop.hbase.client.TableDescriptor; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; + import org.apache.hadoop.hbase.MetaTableAccessor; import org.apache.hadoop.hbase.TableDescriptors; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.TableNotFoundException; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.TableState; @@ -46,7 +47,7 @@ import org.apache.hadoop.hbase.client.TableState; */ @InterfaceAudience.Private public class TableStateManager { - private static final Log LOG = LogFactory.getLog(TableStateManager.class); + private static final Logger LOG = LoggerFactory.getLogger(TableStateManager.class); private final ReadWriteLock lock = new ReentrantReadWriteLock(); private final MasterServices master; diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/AssignProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/AssignProcedure.java index 770d8a49d4e..801caf5eacc 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/AssignProcedure.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/AssignProcedure.java @@ -22,8 +22,6 @@ package org.apache.hadoop.hbase.master.assignment; import java.io.IOException; import java.util.Comparator; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.RegionInfo; @@ -40,7 +38,8 @@ import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer; import org.apache.hadoop.hbase.procedure2.ProcedureSuspendedException; import org.apache.hadoop.hbase.procedure2.RemoteProcedureDispatcher.RemoteOperation; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.AssignRegionStateData; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RegionTransitionState; @@ -72,7 +71,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProto */ @InterfaceAudience.Private public class AssignProcedure extends RegionTransitionProcedure { - private static final Log LOG = LogFactory.getLog(AssignProcedure.class); + private static final Logger LOG = LoggerFactory.getLogger(AssignProcedure.class); /** * Set to true when we need recalibrate -- choose a new target -- because original assign failed. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/AssignmentManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/AssignmentManager.java index 5daf96d8c7d..fac51f037e1 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/AssignmentManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/AssignmentManager.java @@ -36,8 +36,6 @@ import java.util.concurrent.locks.Condition; import java.util.concurrent.locks.ReentrantLock; import java.util.stream.Collectors; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseIOException; import org.apache.hadoop.hbase.HConstants; @@ -92,6 +90,8 @@ import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.hbase.util.Threads; import org.apache.hadoop.hbase.util.VersionInfo; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * The AssignmentManager is the coordinator for region assign/unassign operations. @@ -106,7 +106,7 @@ import org.apache.yetus.audience.InterfaceAudience; */ @InterfaceAudience.Private public class AssignmentManager implements ServerListener { - private static final Log LOG = LogFactory.getLog(AssignmentManager.class); + private static final Logger LOG = LoggerFactory.getLogger(AssignmentManager.class); // TODO: AMv2 // - handle region migration from hbase1 to hbase2. 
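Another recurring adjustment: commons-logging's info(Object) and error(Object) overloads converted arbitrary objects, and even exceptions, to text implicitly, while SLF4J's methods take a String message. Hence Objects.toString(status) for the MonitoredTask in HMaster above, info.toString() for the Put built in RegionStateStore below, and t.toString() plus the throwable in AssignmentManager below. A minimal sketch under that reading; the parameter names are illustrative.

import java.util.Objects;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class MessageArgumentSketch {
  private static final Logger LOG = LoggerFactory.getLogger(MessageArgumentSketch.class);

  void report(Object status) {
    // Objects.toString(...) is the null-safe conversion the patch uses for MonitoredTask.
    LOG.info(Objects.toString(status));
  }

  void failed(Throwable t) {
    // Passing the throwable as the second argument keeps the stack trace in the output.
    LOG.error(t.toString(), t);
  }
}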
@@ -510,7 +510,7 @@ public class AssignmentManager implements ServerListener { } } } catch (Throwable t) { - LOG.error(t); + LOG.error(t.toString(), t); } }).start(); } @@ -748,7 +748,7 @@ public class AssignmentManager implements ServerListener { plan.setDestination(getBalancer().randomAssignment(plan.getRegionInfo(), this.master.getServerManager().createDestinationServersList(exclude))); } catch (HBaseIOException e) { - LOG.warn(e); + LOG.warn(e.toString(), e); } } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/GCMergedRegionsProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/GCMergedRegionsProcedure.java index 37521cc67fe..610003df911 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/GCMergedRegionsProcedure.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/GCMergedRegionsProcedure.java @@ -19,8 +19,6 @@ package org.apache.hadoop.hbase.master.assignment; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.MetaTableAccessor; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.RegionInfo; @@ -30,7 +28,8 @@ import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer; import org.apache.hadoop.hbase.procedure2.ProcedureSuspendedException; import org.apache.hadoop.hbase.procedure2.ProcedureYieldException; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.GCMergedRegionsState; @@ -45,7 +44,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.G @InterfaceAudience.Private public class GCMergedRegionsProcedure extends AbstractStateMachineTableProcedure { - private static final Log LOG = LogFactory.getLog(GCMergedRegionsProcedure.class); + private static final Logger LOG = LoggerFactory.getLogger(GCMergedRegionsProcedure.class); private RegionInfo father; private RegionInfo mother; private RegionInfo mergedChild; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/GCRegionProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/GCRegionProcedure.java index 805b870b386..2b433484c64 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/GCRegionProcedure.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/GCRegionProcedure.java @@ -19,8 +19,6 @@ package org.apache.hadoop.hbase.master.assignment; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.hbase.MetaTableAccessor; import org.apache.hadoop.hbase.backup.HFileArchiver; @@ -33,7 +31,8 @@ import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer; import org.apache.hadoop.hbase.procedure2.ProcedureSuspendedException; import org.apache.hadoop.hbase.procedure2.ProcedureYieldException; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos; @@ -47,7 +46,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.G */ @InterfaceAudience.Private public class GCRegionProcedure extends AbstractStateMachineRegionProcedure { - private static final Log LOG = LogFactory.getLog(GCRegionProcedure.class); + private static final Logger LOG = LoggerFactory.getLogger(GCRegionProcedure.class); public GCRegionProcedure(final MasterProcedureEnv env, final RegionInfo hri) { super(env, hri); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MergeTableRegionsProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MergeTableRegionsProcedure.java index 0d107f2a9d6..6b2d54e1a6a 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MergeTableRegionsProcedure.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MergeTableRegionsProcedure.java @@ -24,8 +24,6 @@ import java.util.Arrays; import java.util.Collection; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -64,7 +62,8 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.FSUtils; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse; @@ -79,7 +78,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.M @InterfaceAudience.Private public class MergeTableRegionsProcedure extends AbstractStateMachineTableProcedure { - private static final Log LOG = LogFactory.getLog(MergeTableRegionsProcedure.class); + private static final Logger LOG = LoggerFactory.getLogger(MergeTableRegionsProcedure.class); private Boolean traceEnabled; private volatile boolean lock = false; private ServerName regionLocation; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MoveRegionProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MoveRegionProcedure.java index 5940f2fe318..a29bfee2cbd 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MoveRegionProcedure.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MoveRegionProcedure.java @@ -21,8 +21,6 @@ package org.apache.hadoop.hbase.master.assignment; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.RegionInfo; @@ -31,7 +29,8 @@ import org.apache.hadoop.hbase.master.procedure.AbstractStateMachineRegionProced import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv; import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.MoveRegionState; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.MoveRegionStateData; @@ -44,7 +43,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.M */ @InterfaceAudience.Private public class MoveRegionProcedure extends AbstractStateMachineRegionProcedure { - private static final Log LOG = LogFactory.getLog(MoveRegionProcedure.class); + private static final Logger LOG = LoggerFactory.getLogger(MoveRegionProcedure.class); private RegionPlan plan; public MoveRegionProcedure() { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionStateStore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionStateStore.java index 079dbd5e777..a45c9e92e7e 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionStateStore.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionStateStore.java @@ -24,8 +24,6 @@ import java.util.Arrays; import java.util.Collections; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.Cell.DataType; import org.apache.hadoop.hbase.CellBuilderFactory; @@ -49,15 +47,18 @@ import org.apache.hadoop.hbase.util.MultiHConnection; import org.apache.hadoop.hbase.zookeeper.MetaTableLocator; import org.apache.yetus.audience.InterfaceAudience; import org.apache.zookeeper.KeeperException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; + /** * Store Region State to hbase:meta table. 
*/ @InterfaceAudience.Private public class RegionStateStore { - private static final Log LOG = LogFactory.getLog(RegionStateStore.class); + private static final Logger LOG = LoggerFactory.getLogger(RegionStateStore.class); /** The delimiter for meta columns for replicaIds > 0 */ protected static final char META_REPLICA_ID_DELIMITER = '_'; @@ -198,7 +199,7 @@ public class RegionStateStore { .setType(DataType.Put) .setValue(Bytes.toBytes(state.name())) .build()); - LOG.info(info); + LOG.info(info.toString()); final boolean serialReplication = hasSerialReplicationScope(regionInfo.getTable()); if (serialReplication && state == State.OPEN) { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionStates.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionStates.java index e9468734a27..c43760474be 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionStates.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionStates.java @@ -34,8 +34,6 @@ import java.util.TreeSet; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentSkipListMap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.TableName; @@ -47,7 +45,8 @@ import org.apache.hadoop.hbase.procedure2.ProcedureEvent; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** @@ -57,7 +56,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe */ @InterfaceAudience.Private public class RegionStates { - private static final Log LOG = LogFactory.getLog(RegionStates.class); + private static final Logger LOG = LoggerFactory.getLogger(RegionStates.class); protected static final State[] STATES_EXPECTED_ON_OPEN = new State[] { State.OFFLINE, State.CLOSED, // disable/offline diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.java index e34c703cc87..6e671c8e6b4 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.java @@ -22,8 +22,6 @@ package org.apache.hadoop.hbase.master.assignment; import java.io.IOException; import java.util.concurrent.atomic.AtomicBoolean; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.RegionInfo; @@ -36,11 +34,11 @@ import org.apache.hadoop.hbase.procedure2.ProcedureSuspendedException; import org.apache.hadoop.hbase.procedure2.RemoteProcedureDispatcher.RemoteOperation; import org.apache.hadoop.hbase.procedure2.RemoteProcedureDispatcher.RemoteProcedure; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; - import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RegionTransitionState; import 
org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode; - import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Base class for the Assign and Unassign Procedure. @@ -89,7 +87,7 @@ public abstract class RegionTransitionProcedure extends Procedure implements TableProcedureInterface, RemoteProcedure { - private static final Log LOG = LogFactory.getLog(RegionTransitionProcedure.class); + private static final Logger LOG = LoggerFactory.getLogger(RegionTransitionProcedure.class); protected final AtomicBoolean aborted = new AtomicBoolean(false); @@ -163,6 +161,7 @@ public abstract class RegionTransitionProcedure protected abstract void reportTransition(MasterProcedureEnv env, RegionStateNode regionNode, TransitionCode code, long seqId) throws UnexpectedStateException; + @Override public abstract RemoteOperation remoteCallBuild(MasterProcedureEnv env, ServerName serverName); /** diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/SplitTableRegionProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/SplitTableRegionProcedure.java index 69024340a46..809d6f3d24a 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/SplitTableRegionProcedure.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/SplitTableRegionProcedure.java @@ -33,8 +33,6 @@ import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -72,7 +70,8 @@ import org.apache.hadoop.hbase.util.FSUtils; import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.hbase.util.Threads; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse; @@ -87,7 +86,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.S @InterfaceAudience.Private public class SplitTableRegionProcedure extends AbstractStateMachineRegionProcedure { - private static final Log LOG = LogFactory.getLog(SplitTableRegionProcedure.class); + private static final Logger LOG = LoggerFactory.getLogger(SplitTableRegionProcedure.class); private Boolean traceEnabled = null; private RegionInfo daughter_1_RI; private RegionInfo daughter_2_RI; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/UnassignProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/UnassignProcedure.java index 66277bec115..8536e77ef13 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/UnassignProcedure.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/UnassignProcedure.java @@ -21,8 +21,6 @@ package org.apache.hadoop.hbase.master.assignment; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.NotServingRegionException; import 
org.apache.hadoop.hbase.ServerName; @@ -40,7 +38,8 @@ import org.apache.hadoop.hbase.procedure2.RemoteProcedureDispatcher.RemoteOperat import org.apache.hadoop.hbase.regionserver.RegionServerAbortedException; import org.apache.hadoop.hbase.regionserver.RegionServerStoppedException; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RegionTransitionState; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.UnassignRegionStateData; @@ -72,7 +71,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProto */ @InterfaceAudience.Private public class UnassignProcedure extends RegionTransitionProcedure { - private static final Log LOG = LogFactory.getLog(UnassignProcedure.class); + private static final Logger LOG = LoggerFactory.getLogger(UnassignProcedure.class); /** * Where to send the unassign RPC. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BalancerChore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BalancerChore.java index b964f219f07..712567cb3d1 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BalancerChore.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BalancerChore.java @@ -20,11 +20,11 @@ package org.apache.hadoop.hbase.master.balancer; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.ScheduledChore; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.master.HMaster; /** @@ -33,7 +33,7 @@ import org.apache.hadoop.hbase.master.HMaster; */ @InterfaceAudience.Private public class BalancerChore extends ScheduledChore { - private static final Log LOG = LogFactory.getLog(BalancerChore.class); + private static final Logger LOG = LoggerFactory.getLogger(BalancerChore.class); private final HMaster master; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java index 8dd34c28ac7..5bacebb671d 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java @@ -35,8 +35,6 @@ import java.util.function.Predicate; import java.util.stream.Collectors; import org.apache.commons.lang3.NotImplementedException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.ClusterStatus; import org.apache.hadoop.hbase.HBaseConfiguration; @@ -53,6 +51,8 @@ import org.apache.hadoop.hbase.master.MasterServices; import org.apache.hadoop.hbase.master.RackManager; import org.apache.hadoop.hbase.master.RegionPlan; import org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer.Cluster.Action.Type; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.common.base.Joiner; @@ -1004,7 +1004,7 @@ public abstract class 
BaseLoadBalancer implements LoadBalancer { protected Configuration config = HBaseConfiguration.create(); protected RackManager rackManager; private static final Random RANDOM = new Random(System.currentTimeMillis()); - private static final Log LOG = LogFactory.getLog(BaseLoadBalancer.class); + private static final Logger LOG = LoggerFactory.getLogger(BaseLoadBalancer.class); protected MetricsBalancer metricsBalancer = null; protected ClusterStatus clusterStatus = null; protected ServerName masterServerName; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/ClusterStatusChore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/ClusterStatusChore.java index 31b1e09291a..427322dfc4b 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/ClusterStatusChore.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/ClusterStatusChore.java @@ -20,10 +20,10 @@ package org.apache.hadoop.hbase.master.balancer; import java.io.InterruptedIOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.ScheduledChore; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.master.HMaster; import org.apache.hadoop.hbase.master.LoadBalancer; @@ -32,7 +32,7 @@ import org.apache.hadoop.hbase.master.LoadBalancer; */ @InterfaceAudience.Private public class ClusterStatusChore extends ScheduledChore { - private static final Log LOG = LogFactory.getLog(ClusterStatusChore.class); + private static final Logger LOG = LoggerFactory.getLogger(ClusterStatusChore.class); private final HMaster master; private final LoadBalancer balancer; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/FavoredStochasticBalancer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/FavoredStochasticBalancer.java index a2fe9a25f24..b3e72faa1cc 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/FavoredStochasticBalancer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/FavoredStochasticBalancer.java @@ -32,8 +32,6 @@ import java.util.Map; import java.util.Map.Entry; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HBaseIOException; import org.apache.hadoop.hbase.ServerLoad; import org.apache.hadoop.hbase.ServerName; @@ -47,7 +45,8 @@ import org.apache.hadoop.hbase.master.LoadBalancer; import org.apache.hadoop.hbase.master.MasterServices; import org.apache.hadoop.hbase.master.RegionPlan; import org.apache.hadoop.hbase.util.Pair; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; @@ -70,7 +69,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; public class FavoredStochasticBalancer extends StochasticLoadBalancer implements FavoredNodesPromoter { - private static final Log LOG = LogFactory.getLog(FavoredStochasticBalancer.class); + private static final Logger LOG = LoggerFactory.getLogger(FavoredStochasticBalancer.class); private FavoredNodesManager fnm; @Override diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/RegionLocationFinder.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/RegionLocationFinder.java index 30462436223..2b48f59a613 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/RegionLocationFinder.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/RegionLocationFinder.java @@ -28,8 +28,6 @@ import java.util.concurrent.ExecutionException; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.ClusterStatus; import org.apache.hadoop.hbase.HDFSBlocksDistribution; @@ -42,7 +40,8 @@ import org.apache.hadoop.hbase.master.assignment.AssignmentManager; import org.apache.hadoop.hbase.regionserver.HRegion; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.cache.CacheBuilder; import org.apache.hadoop.hbase.shaded.com.google.common.cache.CacheLoader; import org.apache.hadoop.hbase.shaded.com.google.common.cache.LoadingCache; @@ -61,7 +60,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFa */ @InterfaceAudience.Private class RegionLocationFinder { - private static final Log LOG = LogFactory.getLog(RegionLocationFinder.class); + private static final Logger LOG = LoggerFactory.getLogger(RegionLocationFinder.class); private static final long CACHE_TIME = 240 * 60 * 1000; private static final HDFSBlocksDistribution EMPTY_BLOCK_DISTRIBUTION = new HDFSBlocksDistribution(); private Configuration conf; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/SimpleLoadBalancer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/SimpleLoadBalancer.java index e356942cbef..adfc577877e 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/SimpleLoadBalancer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/SimpleLoadBalancer.java @@ -28,8 +28,6 @@ import java.util.NavigableMap; import java.util.Random; import java.util.TreeMap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseIOException; import org.apache.hadoop.hbase.HBaseInterfaceAudience; @@ -39,7 +37,8 @@ import org.apache.hadoop.hbase.client.RegionInfo; import org.apache.hadoop.hbase.master.RegionPlan; import org.apache.hadoop.hbase.util.Pair; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.collect.MinMaxPriorityQueue; /** @@ -57,7 +56,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.MinMaxPriorityQu */ @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG) public class SimpleLoadBalancer extends BaseLoadBalancer { - private static final Log LOG = LogFactory.getLog(SimpleLoadBalancer.class); + private static final Logger LOG = LoggerFactory.getLogger(SimpleLoadBalancer.class); private static final Random RANDOM = new Random(System.currentTimeMillis()); private RegionInfoComparator riComparator = new RegionInfoComparator(); diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.java index 23d9cb47c69..71231559254 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.java @@ -30,8 +30,6 @@ import java.util.Map; import java.util.Map.Entry; import java.util.Random; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.ClusterStatus; import org.apache.hadoop.hbase.HBaseInterfaceAudience; @@ -52,7 +50,8 @@ import org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer.Cluster.SwapRegi import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.base.Optional; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; @@ -121,7 +120,7 @@ public class StochasticLoadBalancer extends BaseLoadBalancer { "hbase.master.balancer.stochastic.minCostNeedBalance"; protected static final Random RANDOM = new Random(System.currentTimeMillis()); - private static final Log LOG = LogFactory.getLog(StochasticLoadBalancer.class); + private static final Logger LOG = LoggerFactory.getLogger(StochasticLoadBalancer.class); Map> loads = new HashMap<>(); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java index 582df8436a3..bc7c82d1db3 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java @@ -35,8 +35,6 @@ import java.util.concurrent.ForkJoinPool; import java.util.concurrent.RecursiveTask; import java.util.concurrent.atomic.AtomicBoolean; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -45,6 +43,8 @@ import org.apache.hadoop.hbase.ScheduledChore; import org.apache.hadoop.hbase.Stoppable; import org.apache.hadoop.hbase.util.FSUtils; import org.apache.hadoop.ipc.RemoteException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Abstract Cleaner that uses a chain of delegates to clean a directory of files @@ -53,7 +53,7 @@ import org.apache.hadoop.ipc.RemoteException; public abstract class CleanerChore extends ScheduledChore implements ConfigurationObserver { - private static final Log LOG = LogFactory.getLog(CleanerChore.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(CleanerChore.class.getName()); private static final int AVAIL_PROCESSORS = Runtime.getRuntime().availableProcessors(); /** diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/HFileCleaner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/HFileCleaner.java index 5c78dc498e4..8c02f3ed6e2 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/HFileCleaner.java +++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/HFileCleaner.java @@ -25,8 +25,6 @@ import java.util.Map; import java.util.concurrent.BlockingQueue; import java.util.concurrent.atomic.AtomicLong; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -36,7 +34,8 @@ import org.apache.hadoop.hbase.io.HFileLink; import org.apache.hadoop.hbase.regionserver.StoreFileInfo; import org.apache.hadoop.hbase.util.StealJobQueue; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** * This Chore, every time it runs, will clear the HFiles in the hfile archive @@ -77,7 +76,7 @@ public class HFileCleaner extends CleanerChore { "hbase.regionserver.hfilecleaner.small.thread.count"; public final static int DEFAULT_SMALL_HFILE_DELETE_THREAD_NUMBER = 1; - private static final Log LOG = LogFactory.getLog(HFileCleaner.class); + private static final Logger LOG = LoggerFactory.getLogger(HFileCleaner.class); StealJobQueue largeFileQueue; BlockingQueue smallFileQueue; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/HFileLinkCleaner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/HFileLinkCleaner.java index 8129e34b9cc..db5230c8538 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/HFileLinkCleaner.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/HFileLinkCleaner.java @@ -18,8 +18,7 @@ package org.apache.hadoop.hbase.master.cleaner; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; + import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -27,6 +26,8 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.HConstants; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.io.HFileLink; import org.apache.hadoop.hbase.mob.MobUtils; import org.apache.hadoop.hbase.util.FSUtils; @@ -41,7 +42,7 @@ import org.apache.hadoop.hbase.util.FSUtils; */ @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG) public class HFileLinkCleaner extends BaseHFileCleanerDelegate { - private static final Log LOG = LogFactory.getLog(HFileLinkCleaner.class); + private static final Logger LOG = LoggerFactory.getLogger(HFileLinkCleaner.class); private FileSystem fs = null; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/LogCleaner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/LogCleaner.java index 44aafe20386..5d5dddbc17d 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/LogCleaner.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/LogCleaner.java @@ -25,8 +25,6 @@ import java.util.LinkedList; import java.util.List; import java.util.concurrent.LinkedBlockingQueue; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -35,7 +33,8 @@ import 
org.apache.hadoop.hbase.Stoppable; import org.apache.hadoop.hbase.master.procedure.MasterProcedureUtil; import org.apache.hadoop.hbase.wal.AbstractFSWALProvider; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** @@ -45,7 +44,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe */ @InterfaceAudience.Private public class LogCleaner extends CleanerChore { - private static final Log LOG = LogFactory.getLog(LogCleaner.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(LogCleaner.class.getName()); public static final String OLD_WALS_CLEANER_SIZE = "hbase.oldwals.cleaner.thread.size"; public static final int OLD_WALS_CLEANER_DEFAULT_SIZE = 2; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/ReplicationMetaCleaner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/ReplicationMetaCleaner.java index 23e5a666b2b..43a99bdb51f 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/ReplicationMetaCleaner.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/ReplicationMetaCleaner.java @@ -25,14 +25,14 @@ import java.util.List; import java.util.Map; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.MetaTableAccessor; import org.apache.hadoop.hbase.ScheduledChore; import org.apache.hadoop.hbase.Stoppable; import org.apache.hadoop.hbase.TableName; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor; import org.apache.hadoop.hbase.client.Delete; @@ -48,7 +48,7 @@ import org.apache.hadoop.hbase.util.Bytes; @InterfaceAudience.Private public class ReplicationMetaCleaner extends ScheduledChore { - private static final Log LOG = LogFactory.getLog(ReplicationMetaCleaner.class); + private static final Logger LOG = LoggerFactory.getLogger(ReplicationMetaCleaner.class); private final Admin admin; private final MasterServices master; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/ReplicationZKNodeCleaner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/ReplicationZKNodeCleaner.java index 3f7bd744739..97deab51ed0 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/ReplicationZKNodeCleaner.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/ReplicationZKNodeCleaner.java @@ -26,8 +26,6 @@ import java.util.Map; import java.util.Set; import java.util.Map.Entry; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Abortable; import org.apache.hadoop.hbase.zookeeper.ZKWatcher; @@ -41,13 +39,15 @@ import org.apache.hadoop.hbase.replication.ReplicationStateZKBase; import org.apache.hadoop.hbase.zookeeper.ZKUtil; import org.apache.hadoop.hbase.zookeeper.ZNodePaths; import org.apache.zookeeper.KeeperException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Used to clean the replication queues belonging to the peer which does not exist. 
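The cleaner chores in the surrounding hunks (CleanerChore, LogCleaner, and the TimeToLive* cleaners that follow) keep their existing Class.getName() argument, so after this patch they call LoggerFactory.getLogger(String) rather than LoggerFactory.getLogger(Class). Both overloads exist in slf4j-api and resolve to the same named logger, so the two spellings are interchangeable; a minimal sketch, with FooCleaner standing in as a made-up class name rather than any file touched here:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

class FooCleaner {
  // Both factory overloads are part of slf4j-api and produce a logger with the same name.
  private static final Logger BY_CLASS = LoggerFactory.getLogger(FooCleaner.class);
  private static final Logger BY_NAME = LoggerFactory.getLogger(FooCleaner.class.getName());
}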
*/ @InterfaceAudience.Private public class ReplicationZKNodeCleaner { - private static final Log LOG = LogFactory.getLog(ReplicationZKNodeCleaner.class); + private static final Logger LOG = LoggerFactory.getLogger(ReplicationZKNodeCleaner.class); private final ZKWatcher zkw; private final ReplicationQueuesClient queuesClient; private final ReplicationPeers replicationPeers; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/ReplicationZKNodeCleanerChore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/ReplicationZKNodeCleanerChore.java index 6be13849fa9..8d5df9bfd2d 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/ReplicationZKNodeCleanerChore.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/ReplicationZKNodeCleanerChore.java @@ -22,18 +22,18 @@ import java.io.IOException; import java.util.List; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.ScheduledChore; import org.apache.hadoop.hbase.Stoppable; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Chore that will clean the replication queues belonging to the peer which does not exist. */ @InterfaceAudience.Private public class ReplicationZKNodeCleanerChore extends ScheduledChore { - private static final Log LOG = LogFactory.getLog(ReplicationZKNodeCleanerChore.class); + private static final Logger LOG = LoggerFactory.getLogger(ReplicationZKNodeCleanerChore.class); private final ReplicationZKNodeCleaner cleaner; public ReplicationZKNodeCleanerChore(Stoppable stopper, int period, diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/TimeToLiveHFileCleaner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/TimeToLiveHFileCleaner.java index f9ebdf3bb2d..e789752d1ab 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/TimeToLiveHFileCleaner.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/TimeToLiveHFileCleaner.java @@ -17,9 +17,9 @@ */ package org.apache.hadoop.hbase.master.cleaner; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.hbase.HBaseInterfaceAudience; @@ -32,7 +32,7 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG) public class TimeToLiveHFileCleaner extends BaseHFileCleanerDelegate { - private static final Log LOG = LogFactory.getLog(TimeToLiveHFileCleaner.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(TimeToLiveHFileCleaner.class.getName()); public static final String TTL_CONF_KEY = "hbase.master.hfilecleaner.ttl"; // default ttl = 5 minutes public static final long DEFAULT_TTL = 60000 * 5; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/TimeToLiveLogCleaner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/TimeToLiveLogCleaner.java index c2b872f1ef9..7385273e71f 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/TimeToLiveLogCleaner.java +++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/TimeToLiveLogCleaner.java @@ -17,10 +17,10 @@ */ package org.apache.hadoop.hbase.master.cleaner; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.wal.AbstractFSWALProvider; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.hbase.HBaseInterfaceAudience; @@ -32,7 +32,7 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; */ @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG) public class TimeToLiveLogCleaner extends BaseLogCleanerDelegate { - private static final Log LOG = LogFactory.getLog(TimeToLiveLogCleaner.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(TimeToLiveLogCleaner.class.getName()); public static final String TTL_CONF_KEY = "hbase.master.logcleaner.ttl"; // default ttl = 10 minutes public static final long DEFAULT_TTL = 600_000L; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/TimeToLiveProcedureWALCleaner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/TimeToLiveProcedureWALCleaner.java index cd9a7ec3d2a..467accdac09 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/TimeToLiveProcedureWALCleaner.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/TimeToLiveProcedureWALCleaner.java @@ -17,14 +17,14 @@ */ package org.apache.hadoop.hbase.master.cleaner; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.master.procedure.MasterProcedureUtil; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Procedure WAL cleaner that uses the timestamp of the Procedure WAL to determine if it should be @@ -32,8 +32,8 @@ import org.apache.yetus.audience.InterfaceAudience; */ @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG) public class TimeToLiveProcedureWALCleaner extends BaseFileCleanerDelegate { - - private static final Log LOG = LogFactory.getLog(TimeToLiveProcedureWALCleaner.class.getName()); + private static final Logger LOG = + LoggerFactory.getLogger(TimeToLiveProcedureWALCleaner.class.getName()); public static final String TTL_CONF_KEY = "hbase.master.procedurewalcleaner.ttl"; // default ttl = 7 days public static final long DEFAULT_TTL = 604_800_000L; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/locking/LockManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/locking/LockManager.java index 883d6596caa..36269f378e5 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/locking/LockManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/locking/LockManager.java @@ -22,15 +22,14 @@ import java.io.IOException; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.RegionInfo; import 
org.apache.hadoop.hbase.master.HMaster; import org.apache.hadoop.hbase.procedure2.LockType; import org.apache.hadoop.hbase.util.NonceKey; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** @@ -38,7 +37,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe */ @InterfaceAudience.Private public final class LockManager { - private static final Log LOG = LogFactory.getLog(LockManager.class); + private static final Logger LOG = LoggerFactory.getLogger(LockManager.class); private final HMaster master; private final RemoteLocks remoteLocks; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/locking/LockProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/locking/LockProcedure.java index 61843d81fa3..edf7642574d 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/locking/LockProcedure.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/locking/LockProcedure.java @@ -24,8 +24,6 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicLong; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.RegionInfo; @@ -37,7 +35,8 @@ import org.apache.hadoop.hbase.procedure2.ProcedureEvent; import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer; import org.apache.hadoop.hbase.procedure2.ProcedureSuspendedException; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos; import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData; @@ -57,7 +56,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos; @InterfaceAudience.Private public final class LockProcedure extends Procedure implements TableProcedureInterface { - private static final Log LOG = LogFactory.getLog(LockProcedure.class); + private static final Logger LOG = LoggerFactory.getLogger(LockProcedure.class); public static final int DEFAULT_REMOTE_LOCKS_TIMEOUT_MS = 30000; // timeout in ms public static final String REMOTE_LOCKS_TIMEOUT_MS_CONF = diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/normalizer/MergeNormalizationPlan.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/normalizer/MergeNormalizationPlan.java index b6602b190b4..7c33661d7c3 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/normalizer/MergeNormalizationPlan.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/normalizer/MergeNormalizationPlan.java @@ -20,18 +20,18 @@ package org.apache.hadoop.hbase.master.normalizer; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.RegionInfo; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Normalization plan to merge regions (smallest region in the table with its smallest neighbor). 
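None of the hunks in this patch change how log messages are built, but SLF4J additionally supports parameterized messages, which defer argument formatting until the level is known to be enabled. A hedged illustration only, with an invented class, message, and variables rather than code from this patch:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

class ParameterizedLoggingSketch {
  private static final Logger LOG = LoggerFactory.getLogger(ParameterizedLoggingSketch.class);

  void report(String firstRegion, String secondRegion) {
    // The {} placeholders are substituted only if INFO is enabled, so no string
    // concatenation happens for suppressed levels.
    LOG.info("merging region {} with {}", firstRegion, secondRegion);
  }
}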
*/ @InterfaceAudience.Private public class MergeNormalizationPlan implements NormalizationPlan { - private static final Log LOG = LogFactory.getLog(MergeNormalizationPlan.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(MergeNormalizationPlan.class.getName()); private final RegionInfo firstRegion; private final RegionInfo secondRegion; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/normalizer/RegionNormalizerChore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/normalizer/RegionNormalizerChore.java index 7137bc9d9d3..19d2dc7a3ba 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/normalizer/RegionNormalizerChore.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/normalizer/RegionNormalizerChore.java @@ -18,10 +18,10 @@ */ package org.apache.hadoop.hbase.master.normalizer; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.ScheduledChore; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.master.HMaster; import java.io.IOException; @@ -32,7 +32,7 @@ import java.io.IOException; */ @InterfaceAudience.Private public class RegionNormalizerChore extends ScheduledChore { - private static final Log LOG = LogFactory.getLog(RegionNormalizerChore.class); + private static final Logger LOG = LoggerFactory.getLogger(RegionNormalizerChore.class); private final HMaster master; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/normalizer/SimpleRegionNormalizer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/normalizer/SimpleRegionNormalizer.java index 8190f271ced..767324aa49c 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/normalizer/SimpleRegionNormalizer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/normalizer/SimpleRegionNormalizer.java @@ -23,8 +23,6 @@ import java.util.Collections; import java.util.Comparator; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HBaseIOException; import org.apache.hadoop.hbase.RegionLoad; import org.apache.hadoop.hbase.ServerName; @@ -35,7 +33,8 @@ import org.apache.hadoop.hbase.master.MasterRpcServices; import org.apache.hadoop.hbase.master.MasterServices; import org.apache.hadoop.hbase.master.normalizer.NormalizationPlan.PlanType; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter; /** @@ -60,7 +59,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter; @InterfaceAudience.Private public class SimpleRegionNormalizer implements RegionNormalizer { - private static final Log LOG = LogFactory.getLog(SimpleRegionNormalizer.class); + private static final Logger LOG = LoggerFactory.getLogger(SimpleRegionNormalizer.class); private static final int MIN_REGION_COUNT = 3; private MasterServices masterServices; private MasterRpcServices masterRpcServices; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/normalizer/SplitNormalizationPlan.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/normalizer/SplitNormalizationPlan.java index 9217143edd3..b5f8e823ec4 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/normalizer/SplitNormalizationPlan.java +++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/normalizer/SplitNormalizationPlan.java @@ -21,18 +21,18 @@ package org.apache.hadoop.hbase.master.normalizer; import java.io.IOException; import java.util.Arrays; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.RegionInfo; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Normalization plan to split region. */ @InterfaceAudience.Private public class SplitNormalizationPlan implements NormalizationPlan { - private static final Log LOG = LogFactory.getLog(SplitNormalizationPlan.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(SplitNormalizationPlan.class.getName()); private RegionInfo regionInfo; private byte[] splitPoint; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CloneSnapshotProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CloneSnapshotProcedure.java index a524879bf0c..a17108fdcaa 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CloneSnapshotProcedure.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CloneSnapshotProcedure.java @@ -25,8 +25,6 @@ import java.util.Iterator; import java.util.List; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -54,7 +52,8 @@ import org.apache.hadoop.hbase.util.FSTableDescriptors; import org.apache.hadoop.hbase.util.FSUtils; import org.apache.hadoop.hbase.util.Pair; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos; @@ -65,7 +64,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.Snapshot @InterfaceAudience.Private public class CloneSnapshotProcedure extends AbstractStateMachineTableProcedure { - private static final Log LOG = LogFactory.getLog(CloneSnapshotProcedure.class); + private static final Logger LOG = LoggerFactory.getLogger(CloneSnapshotProcedure.class); private TableDescriptor tableDescriptor; private SnapshotDescription snapshot; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CreateNamespaceProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CreateNamespaceProcedure.java index fa743bdbddf..f0d0af8c537 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CreateNamespaceProcedure.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CreateNamespaceProcedure.java @@ -19,11 +19,12 @@ package org.apache.hadoop.hbase.master.procedure; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; + import org.apache.hadoop.hbase.NamespaceDescriptor; import org.apache.hadoop.hbase.NamespaceExistException; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.master.MasterFileSystem; import 
org.apache.hadoop.hbase.master.TableNamespaceManager; import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer; @@ -38,7 +39,7 @@ import org.apache.hadoop.hbase.util.FSUtils; @InterfaceAudience.Private public class CreateNamespaceProcedure extends AbstractStateMachineNamespaceProcedure { - private static final Log LOG = LogFactory.getLog(CreateNamespaceProcedure.class); + private static final Logger LOG = LoggerFactory.getLogger(CreateNamespaceProcedure.class); private NamespaceDescriptor nsDescriptor; private Boolean traceEnabled; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CreateTableProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CreateTableProcedure.java index ed137c25981..63d6d2f7e4a 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CreateTableProcedure.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CreateTableProcedure.java @@ -22,8 +22,6 @@ import java.io.IOException; import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.DoNotRetryIOException; @@ -42,7 +40,8 @@ import org.apache.hadoop.hbase.util.FSUtils; import org.apache.hadoop.hbase.util.ModifyRegionUtils; import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos; @@ -52,7 +51,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.C @InterfaceAudience.Private public class CreateTableProcedure extends AbstractStateMachineTableProcedure { - private static final Log LOG = LogFactory.getLog(CreateTableProcedure.class); + private static final Logger LOG = LoggerFactory.getLogger(CreateTableProcedure.class); private TableDescriptor tableDescriptor; private List newRegions; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DeleteNamespaceProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DeleteNamespaceProcedure.java index 9646946e89a..1c587eb55d4 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DeleteNamespaceProcedure.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DeleteNamespaceProcedure.java @@ -20,8 +20,7 @@ package org.apache.hadoop.hbase.master.procedure; import java.io.FileNotFoundException; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; + import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -29,6 +28,8 @@ import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.NamespaceDescriptor; import org.apache.hadoop.hbase.NamespaceNotFoundException; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.constraint.ConstraintException; import org.apache.hadoop.hbase.master.MasterFileSystem; import org.apache.hadoop.hbase.master.TableNamespaceManager; @@ -44,7 +45,7 @@ import org.apache.hadoop.hbase.util.FSUtils; 
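The procedure classes in this part of the patch (CreateNamespaceProcedure, CreateTableProcedure, DeleteNamespaceProcedure, and the rest below) all receive the same two-part substitution: the commons-logging imports are replaced by the org.slf4j imports, and the static LOG field switches from LogFactory.getLog to LoggerFactory.getLogger. A before/after sketch of that pattern, using a made-up ExampleProcedure class rather than any file touched here; note that the SLF4J info/debug/warn methods take a String (optionally with arguments) instead of commons-logging's Object parameter, which is why the RegionStateStore hunk earlier in this patch rewrites LOG.info(info) as LOG.info(info.toString()):

// Before (commons-logging):
// import org.apache.commons.logging.Log;
// import org.apache.commons.logging.LogFactory;
// private static final Log LOG = LogFactory.getLog(ExampleProcedure.class);

// After (SLF4J), following the pattern used throughout this patch:
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class ExampleProcedure {
  private static final Logger LOG = LoggerFactory.getLogger(ExampleProcedure.class);

  void reportState(Object state) {
    // SLF4J loggers accept String messages, not arbitrary Objects,
    // so callers that previously passed an Object now call toString() explicitly.
    LOG.info(state.toString());
  }
}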
@InterfaceAudience.Private public class DeleteNamespaceProcedure extends AbstractStateMachineNamespaceProcedure { - private static final Log LOG = LogFactory.getLog(DeleteNamespaceProcedure.class); + private static final Logger LOG = LoggerFactory.getLogger(DeleteNamespaceProcedure.class); private NamespaceDescriptor nsDescriptor; private String namespaceName; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DeleteTableProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DeleteTableProcedure.java index 4cc18755ec2..151e3d65d18 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DeleteTableProcedure.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DeleteTableProcedure.java @@ -22,8 +22,6 @@ import java.io.IOException; import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -49,7 +47,8 @@ import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer; import org.apache.hadoop.hbase.regionserver.HRegion; import org.apache.hadoop.hbase.util.FSUtils; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos; @@ -58,7 +57,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.D @InterfaceAudience.Private public class DeleteTableProcedure extends AbstractStateMachineTableProcedure { - private static final Log LOG = LogFactory.getLog(DeleteTableProcedure.class); + private static final Logger LOG = LoggerFactory.getLogger(DeleteTableProcedure.class); private List regions; private TableName tableName; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DisableTableProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DisableTableProcedure.java index 045ee9e7e8f..e748c6ce7f1 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DisableTableProcedure.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DisableTableProcedure.java @@ -19,13 +19,14 @@ package org.apache.hadoop.hbase.master.procedure; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; + import org.apache.hadoop.hbase.MetaTableAccessor; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.TableNotEnabledException; import org.apache.hadoop.hbase.TableNotFoundException; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.client.TableState; import org.apache.hadoop.hbase.constraint.ConstraintException; import org.apache.hadoop.hbase.master.MasterCoprocessorHost; @@ -38,7 +39,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.D @InterfaceAudience.Private public class DisableTableProcedure extends AbstractStateMachineTableProcedure { - private static final Log LOG = LogFactory.getLog(DisableTableProcedure.class); + private static final Logger LOG = LoggerFactory.getLogger(DisableTableProcedure.class); private TableName 
tableName; private boolean skipTableStateCheck; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/EnableTableProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/EnableTableProcedure.java index cf406969604..c501e5396a5 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/EnableTableProcedure.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/EnableTableProcedure.java @@ -22,8 +22,6 @@ import java.io.IOException; import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.MetaTableAccessor; @@ -31,6 +29,8 @@ import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.TableNotDisabledException; import org.apache.hadoop.hbase.TableNotFoundException; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.RegionInfo; @@ -49,7 +49,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.E @InterfaceAudience.Private public class EnableTableProcedure extends AbstractStateMachineTableProcedure { - private static final Log LOG = LogFactory.getLog(EnableTableProcedure.class); + private static final Logger LOG = LoggerFactory.getLogger(EnableTableProcedure.class); private TableName tableName; private boolean skipTableStateCheck; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterDDLOperationHelper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterDDLOperationHelper.java index 02ecdc6b3d9..6d06de2c514 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterDDLOperationHelper.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterDDLOperationHelper.java @@ -21,8 +21,6 @@ package org.apache.hadoop.hbase.master.procedure; import java.io.IOException; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.RegionInfo; @@ -31,13 +29,15 @@ import org.apache.hadoop.hbase.mob.MobConstants; import org.apache.hadoop.hbase.mob.MobUtils; import org.apache.hadoop.hbase.util.Bytes; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Helper class for schema change procedures */ @InterfaceAudience.Private public final class MasterDDLOperationHelper { - private static final Log LOG = LogFactory.getLog(MasterDDLOperationHelper.class); + private static final Logger LOG = LoggerFactory.getLogger(MasterDDLOperationHelper.class); private MasterDDLOperationHelper() {} diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureEnv.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureEnv.java index c9c3ac98204..0a4c97db59c 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureEnv.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureEnv.java @@ -20,14 +20,14 @@ package 
org.apache.hadoop.hbase.master.procedure; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.conf.ConfigurationObserver; import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceStability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.ipc.RpcServer; import org.apache.hadoop.hbase.master.HMaster; import org.apache.hadoop.hbase.master.MasterCoprocessorHost; @@ -45,7 +45,7 @@ import org.apache.hadoop.hbase.util.FSUtils; @InterfaceAudience.Private @InterfaceStability.Evolving public class MasterProcedureEnv implements ConfigurationObserver { - private static final Log LOG = LogFactory.getLog(MasterProcedureEnv.class); + private static final Logger LOG = LoggerFactory.getLogger(MasterProcedureEnv.class); @InterfaceAudience.Private public static class WALStoreLeaseRecovery implements WALProcedureStore.LeaseRecovery { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureScheduler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureScheduler.java index 94028453546..c60de5c9dfa 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureScheduler.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureScheduler.java @@ -26,8 +26,6 @@ import java.util.List; import java.util.Map; import java.util.Map.Entry; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.TableExistsException; @@ -50,7 +48,8 @@ import org.apache.hadoop.hbase.util.AvlUtil.AvlLinkedNode; import org.apache.hadoop.hbase.util.AvlUtil.AvlTree; import org.apache.hadoop.hbase.util.AvlUtil.AvlTreeIterator; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** @@ -104,7 +103,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe */ @InterfaceAudience.Private public class MasterProcedureScheduler extends AbstractProcedureScheduler { - private static final Log LOG = LogFactory.getLog(MasterProcedureScheduler.class); + private static final Logger LOG = LoggerFactory.getLogger(MasterProcedureScheduler.class); private final static ServerQueueKeyComparator SERVER_QUEUE_KEY_COMPARATOR = new ServerQueueKeyComparator(); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureUtil.java index 7826f96f792..b87f4379899 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureUtil.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureUtil.java @@ -21,10 +21,10 @@ package org.apache.hadoop.hbase.master.procedure; import java.io.IOException; import java.util.regex.Pattern; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceStability; +import 
org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.master.MasterServices; import org.apache.hadoop.hbase.procedure2.Procedure; import org.apache.hadoop.hbase.procedure2.ProcedureExecutor; @@ -36,7 +36,7 @@ import org.apache.hadoop.security.UserGroupInformation; @InterfaceAudience.Private @InterfaceStability.Evolving public final class MasterProcedureUtil { - private static final Log LOG = LogFactory.getLog(MasterProcedureUtil.class); + private static final Logger LOG = LoggerFactory.getLogger(MasterProcedureUtil.class); private MasterProcedureUtil() {} diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ModifyNamespaceProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ModifyNamespaceProcedure.java index 697a2ea668b..2a7dc5b28bb 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ModifyNamespaceProcedure.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ModifyNamespaceProcedure.java @@ -19,11 +19,12 @@ package org.apache.hadoop.hbase.master.procedure; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; + import org.apache.hadoop.hbase.NamespaceDescriptor; import org.apache.hadoop.hbase.NamespaceNotFoundException; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.master.TableNamespaceManager; import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; @@ -36,7 +37,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.M @InterfaceAudience.Private public class ModifyNamespaceProcedure extends AbstractStateMachineNamespaceProcedure { - private static final Log LOG = LogFactory.getLog(ModifyNamespaceProcedure.class); + private static final Logger LOG = LoggerFactory.getLogger(ModifyNamespaceProcedure.class); private NamespaceDescriptor oldNsDescriptor; private NamespaceDescriptor newNsDescriptor; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ModifyTableProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ModifyTableProcedure.java index bda8b81e987..f0be1e08197 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ModifyTableProcedure.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ModifyTableProcedure.java @@ -23,8 +23,6 @@ import java.util.HashSet; import java.util.List; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.MetaTableAccessor; @@ -42,7 +40,8 @@ import org.apache.hadoop.hbase.master.MasterCoprocessorHost; import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer; import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ModifyTableState; @@ -50,7 +49,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.M 
@InterfaceAudience.Private public class ModifyTableProcedure extends AbstractStateMachineTableProcedure { - private static final Log LOG = LogFactory.getLog(ModifyTableProcedure.class); + private static final Logger LOG = LoggerFactory.getLogger(ModifyTableProcedure.class); private TableDescriptor unmodifiedTableDescriptor = null; private TableDescriptor modifiedTableDescriptor; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ProcedureSyncWait.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ProcedureSyncWait.java index 23b83acd645..ae37a48d26a 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ProcedureSyncWait.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ProcedureSyncWait.java @@ -26,8 +26,6 @@ import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.CoordinatedStateException; import org.apache.hadoop.hbase.NotAllMetaRegionsOnlineException; @@ -40,6 +38,8 @@ import org.apache.hadoop.hbase.quotas.MasterQuotaManager; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceStability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Helper to synchronously wait on conditions. @@ -50,7 +50,7 @@ import org.apache.yetus.audience.InterfaceStability; @InterfaceAudience.Private @InterfaceStability.Evolving public final class ProcedureSyncWait { - private static final Log LOG = LogFactory.getLog(ProcedureSyncWait.class); + private static final Logger LOG = LoggerFactory.getLogger(ProcedureSyncWait.class); private ProcedureSyncWait() {} diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RSProcedureDispatcher.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RSProcedureDispatcher.java index 045c416ff5c..72e0846cefa 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RSProcedureDispatcher.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RSProcedureDispatcher.java @@ -25,8 +25,6 @@ import java.util.Set; import java.util.concurrent.Callable; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.client.RegionInfo; @@ -36,7 +34,8 @@ import org.apache.hadoop.hbase.master.ServerListener; import org.apache.hadoop.hbase.procedure2.RemoteProcedureDispatcher; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.ipc.RemoteException; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap; import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback; import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController; @@ -57,7 +56,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionR public class RSProcedureDispatcher extends RemoteProcedureDispatcher implements ServerListener { - private static final Log LOG = LogFactory.getLog(RSProcedureDispatcher.class); + private static final Logger LOG 
= LoggerFactory.getLogger(RSProcedureDispatcher.class); public static final String RS_RPC_STARTUP_WAIT_TIME_CONF_KEY = "hbase.regionserver.rpc.startup.waittime"; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RecoverMetaProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RecoverMetaProcedure.java index f1174d42ea4..90dfff043a5 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RecoverMetaProcedure.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RecoverMetaProcedure.java @@ -21,8 +21,6 @@ package org.apache.hadoop.hbase.master.procedure; import java.io.IOException; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.RegionInfo; @@ -36,7 +34,8 @@ import org.apache.hadoop.hbase.procedure2.ProcedureYieldException; import org.apache.hadoop.hbase.procedure2.StateMachineProcedure; import org.apache.hadoop.hbase.zookeeper.MetaTableLocator; import org.apache.zookeeper.KeeperException; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RecoverMetaState; @@ -51,7 +50,7 @@ import com.google.common.base.Preconditions; public class RecoverMetaProcedure extends StateMachineProcedure implements TableProcedureInterface { - private static final Log LOG = LogFactory.getLog(RecoverMetaProcedure.class); + private static final Logger LOG = LoggerFactory.getLogger(RecoverMetaProcedure.class); private ServerName failedMetaServer; private boolean shouldSplitWal; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RestoreSnapshotProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RestoreSnapshotProcedure.java index 2cf558437a8..9aa5171786d 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RestoreSnapshotProcedure.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RestoreSnapshotProcedure.java @@ -25,8 +25,6 @@ import java.util.Iterator; import java.util.List; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.DoNotRetryIOException; @@ -49,7 +47,8 @@ import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils; import org.apache.hadoop.hbase.snapshot.SnapshotManifest; import org.apache.hadoop.hbase.util.Pair; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos; @@ -59,7 +58,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.Snapshot @InterfaceAudience.Private public class RestoreSnapshotProcedure extends AbstractStateMachineTableProcedure { - private static final Log LOG = LogFactory.getLog(RestoreSnapshotProcedure.class); + private static final Logger LOG = LoggerFactory.getLogger(RestoreSnapshotProcedure.class); 
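Every file in this patch follows the same mechanical substitution: drop the commons-logging imports, add org.slf4j.Logger and org.slf4j.LoggerFactory, and swap LogFactory.getLog(Foo.class) for LoggerFactory.getLogger(Foo.class). A minimal sketch of the before/after shape, using a hypothetical ExampleProcedure class that is not part of the patch:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class ExampleProcedure {
  // Before: private static final Log LOG = LogFactory.getLog(ExampleProcedure.class);
  private static final Logger LOG = LoggerFactory.getLogger(ExampleProcedure.class);

  void reportProgress(String table, long procId) {
    // SLF4J also allows parameterized messages, so the message is only built
    // when INFO is enabled; with commons-logging this needed concatenation or
    // an explicit isInfoEnabled() guard. Existing call sites keep working as-is.
    LOG.info("Procedure {} running against table {}", procId, table);
  }
}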
private TableDescriptor modifiedTableDescriptor; private List regionsToRestore = null; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ServerCrashProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ServerCrashProcedure.java index 0e37c1174b5..e9be05800dc 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ServerCrashProcedure.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ServerCrashProcedure.java @@ -22,8 +22,6 @@ import java.util.ArrayList; import java.util.Iterator; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.RegionInfo; @@ -38,6 +36,8 @@ import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer; import org.apache.hadoop.hbase.procedure2.ProcedureSuspendedException; import org.apache.hadoop.hbase.procedure2.ProcedureYieldException; import org.apache.hadoop.hbase.procedure2.StateMachineProcedure; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos; @@ -56,7 +56,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.S public class ServerCrashProcedure extends StateMachineProcedure implements ServerProcedureInterface { - private static final Log LOG = LogFactory.getLog(ServerCrashProcedure.class); + private static final Logger LOG = LoggerFactory.getLogger(ServerCrashProcedure.class); /** * Name of the crashed server to process. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/TruncateTableProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/TruncateTableProcedure.java index dce3b414198..541fb8e1aee 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/TruncateTableProcedure.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/TruncateTableProcedure.java @@ -23,8 +23,6 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.TableNotDisabledException; import org.apache.hadoop.hbase.TableNotFoundException; @@ -36,7 +34,8 @@ import org.apache.hadoop.hbase.master.MasterCoprocessorHost; import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer; import org.apache.hadoop.hbase.util.ModifyRegionUtils; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos; @@ -45,7 +44,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.T @InterfaceAudience.Private public class TruncateTableProcedure extends AbstractStateMachineTableProcedure { - private static final Log LOG = LogFactory.getLog(TruncateTableProcedure.class); + private static final Logger LOG = LoggerFactory.getLogger(TruncateTableProcedure.class); private boolean preserveSplits; private List regions; diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/DisabledTableSnapshotHandler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/DisabledTableSnapshotHandler.java index 84c154f8bff..ee5afd7d1e7 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/DisabledTableSnapshotHandler.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/DisabledTableSnapshotHandler.java @@ -23,8 +23,6 @@ import java.util.List; import java.util.Set; import java.util.concurrent.ThreadPoolExecutor; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.client.RegionInfo; import org.apache.hadoop.hbase.client.RegionReplicaUtil; @@ -39,7 +37,8 @@ import org.apache.hadoop.hbase.util.Pair; import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceStability; import org.apache.zookeeper.KeeperException; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription; /** @@ -50,7 +49,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.Snapshot @InterfaceAudience.Private @InterfaceStability.Evolving public class DisabledTableSnapshotHandler extends TakeSnapshotHandler { - private static final Log LOG = LogFactory.getLog(DisabledTableSnapshotHandler.class); + private static final Logger LOG = LoggerFactory.getLogger(DisabledTableSnapshotHandler.class); /** * @param snapshot descriptor of the snapshot to take diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/EnabledTableSnapshotHandler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/EnabledTableSnapshotHandler.java index 399a1274e48..db2b9c888e6 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/EnabledTableSnapshotHandler.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/EnabledTableSnapshotHandler.java @@ -22,8 +22,6 @@ import java.util.HashSet; import java.util.List; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.client.RegionInfo; import org.apache.hadoop.hbase.errorhandling.ForeignException; @@ -34,7 +32,8 @@ import org.apache.hadoop.hbase.procedure.ProcedureCoordinator; import org.apache.hadoop.hbase.snapshot.HBaseSnapshotException; import org.apache.hadoop.hbase.util.Pair; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription; @@ -46,7 +45,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.Snapshot @InterfaceAudience.Private public class EnabledTableSnapshotHandler extends TakeSnapshotHandler { - private static final Log LOG = LogFactory.getLog(EnabledTableSnapshotHandler.class); + private static final Logger LOG = LoggerFactory.getLogger(EnabledTableSnapshotHandler.class); private final ProcedureCoordinator coordinator; public EnabledTableSnapshotHandler(SnapshotDescription snapshot, MasterServices master, diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/MasterSnapshotVerifier.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/MasterSnapshotVerifier.java index b698082f741..1e1a3c421dc 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/MasterSnapshotVerifier.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/MasterSnapshotVerifier.java @@ -22,8 +22,6 @@ import java.util.List; import java.util.Map; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.MetaTableAccessor; @@ -41,7 +39,8 @@ import org.apache.hadoop.hbase.snapshot.SnapshotReferenceUtil; import org.apache.hadoop.hbase.zookeeper.MetaTableLocator; import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceStability; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription; import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest; @@ -77,7 +76,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.Snapshot @InterfaceAudience.Private @InterfaceStability.Unstable public final class MasterSnapshotVerifier { - private static final Log LOG = LogFactory.getLog(MasterSnapshotVerifier.class); + private static final Logger LOG = LoggerFactory.getLogger(MasterSnapshotVerifier.class); private SnapshotDescription snapshot; private FileSystem fs; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotFileCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotFileCache.java index 56d1319c949..560e7f568c9 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotFileCache.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotFileCache.java @@ -29,12 +29,6 @@ import java.util.Timer; import java.util.TimerTask; import java.util.concurrent.locks.ReentrantLock; -import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; -import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.yetus.audience.InterfaceAudience; -import org.apache.yetus.audience.InterfaceStability; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -43,6 +37,13 @@ import org.apache.hadoop.hbase.Stoppable; import org.apache.hadoop.hbase.snapshot.CorruptedSnapshotException; import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils; import org.apache.hadoop.hbase.util.FSUtils; +import org.apache.yetus.audience.InterfaceAudience; +import org.apache.yetus.audience.InterfaceStability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; /** * Intelligently keep track of all the files for all the snapshots. 
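Call sites in these snapshot and procedure classes that pass an exception as the last logging argument need no edits beyond the import swap, because the (message, throwable) overload exists in both APIs. A minimal sketch under that assumption; ExampleCacheRefresher and refreshCache() are hypothetical and not taken from the patch:

import java.io.IOException;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class ExampleCacheRefresher {
  private static final Logger LOG = LoggerFactory.getLogger(ExampleCacheRefresher.class);

  void refresh() {
    try {
      refreshCache();
    } catch (IOException e) {
      // The (String, Throwable) overload exists in both commons-logging and SLF4J,
      // so exception-logging call sites compile unchanged after the migration.
      LOG.warn("Failed to refresh snapshot hfile cache", e);
    }
  }

  private void refreshCache() throws IOException {
    // placeholder for the real work
  }
}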
@@ -85,7 +86,7 @@ public class SnapshotFileCache implements Stoppable { Collection filesUnderSnapshot(final Path snapshotDir) throws IOException; } - private static final Log LOG = LogFactory.getLog(SnapshotFileCache.class); + private static final Logger LOG = LoggerFactory.getLogger(SnapshotFileCache.class); private volatile boolean stop = false; private final FileSystem fs; private final SnapshotFileInspector fileInspector; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotHFileCleaner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotHFileCleaner.java index f3ca9935766..a8475f0f6e5 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotHFileCleaner.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotHFileCleaner.java @@ -22,10 +22,10 @@ import java.util.Collection; import java.util.Collections; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceStability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -45,7 +45,7 @@ import org.apache.hadoop.hbase.util.FSUtils; @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG) @InterfaceStability.Evolving public class SnapshotHFileCleaner extends BaseHFileCleanerDelegate { - private static final Log LOG = LogFactory.getLog(SnapshotHFileCleaner.class); + private static final Logger LOG = LoggerFactory.getLogger(SnapshotHFileCleaner.class); /** * Conf key for the frequency to attempt to refresh the cache of hfiles currently used in diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotManager.java index 20a4f39935e..3870601db43 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotManager.java @@ -29,8 +29,6 @@ import java.util.Map; import java.util.Set; import java.util.concurrent.ThreadPoolExecutor; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FileStatus; @@ -83,7 +81,8 @@ import org.apache.hadoop.hbase.util.NonceKey; import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceStability; import org.apache.zookeeper.KeeperException; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair; import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription; @@ -102,7 +101,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.Snapshot @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG) @InterfaceStability.Unstable public class SnapshotManager extends MasterProcedureManager implements Stoppable { - private static final Log LOG = LogFactory.getLog(SnapshotManager.class); + private static final Logger LOG = LoggerFactory.getLogger(SnapshotManager.class); /** By default, check to 
see if the snapshot is complete every WAKE MILLIS (ms) */ private static final int SNAPSHOT_WAKE_MILLIS_DEFAULT = 500; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/TakeSnapshotHandler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/TakeSnapshotHandler.java index 808cab5b354..9b077d15e65 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/TakeSnapshotHandler.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/TakeSnapshotHandler.java @@ -25,8 +25,6 @@ import java.util.Set; import java.util.concurrent.CancellationException; import java.util.concurrent.locks.ReentrantLock; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -56,7 +54,8 @@ import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.hbase.zookeeper.MetaTableLocator; import org.apache.yetus.audience.InterfaceAudience; import org.apache.zookeeper.KeeperException; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription; /** @@ -69,7 +68,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.Snapshot @InterfaceAudience.Private public abstract class TakeSnapshotHandler extends EventHandler implements SnapshotSentinel, ForeignExceptionSnare { - private static final Log LOG = LogFactory.getLog(TakeSnapshotHandler.class); + private static final Logger LOG = LoggerFactory.getLogger(TakeSnapshotHandler.class); private volatile boolean finished; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreCompactor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreCompactor.java index 447629b5508..01c195a4d31 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreCompactor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreCompactor.java @@ -23,8 +23,6 @@ import java.util.ArrayList; import java.util.Date; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; @@ -52,6 +50,8 @@ import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Compact passed set of files in the mob-enabled column family. 
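One change in this area is not purely mechanical. org.slf4j.Logger only accepts String messages, whereas commons-logging's Log.info(Object) accepted any object, so the PartitionedMobCompactor hunk further down wraps a non-String argument with java.util.Objects.toString(...) before logging it. A minimal sketch of that idiom; the class and the List argument are assumptions standing in for delPartition.listDelFiles(), not taken from the patch:

import java.util.List;
import java.util.Objects;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class ExampleDelFileLogger {
  private static final Logger LOG = LoggerFactory.getLogger(ExampleDelFileLogger.class);

  void logDelFiles(List<String> delFiles) {
    // Before, with commons-logging, this could be LOG.info(delFiles), since
    // Log.info(Object) existed. SLF4J's info(...) takes a String, so the value
    // is converted explicitly; Objects.toString also tolerates null and logs "null".
    LOG.info(Objects.toString(delFiles));
  }
}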
@@ -59,7 +59,7 @@ import org.apache.yetus.audience.InterfaceAudience; @InterfaceAudience.Private public class DefaultMobStoreCompactor extends DefaultCompactor { - private static final Log LOG = LogFactory.getLog(DefaultMobStoreCompactor.class); + private static final Logger LOG = LoggerFactory.getLogger(DefaultMobStoreCompactor.class); private long mobSizeThreshold; private HMobStore mobStore; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreFlusher.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreFlusher.java index 27809c4e00a..a932dad7080 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreFlusher.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreFlusher.java @@ -24,8 +24,6 @@ import java.util.ArrayList; import java.util.Date; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; @@ -46,6 +44,8 @@ import org.apache.hadoop.hbase.regionserver.throttle.ThroughputController; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.util.StringUtils; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * An implementation of the StoreFlusher. It extends the DefaultStoreFlusher. @@ -65,7 +65,7 @@ import org.apache.yetus.audience.InterfaceAudience; @InterfaceAudience.Private public class DefaultMobStoreFlusher extends DefaultStoreFlusher { - private static final Log LOG = LogFactory.getLog(DefaultMobStoreFlusher.class); + private static final Logger LOG = LoggerFactory.getLogger(DefaultMobStoreFlusher.class); private final Object flushLock = new Object(); private long mobCellValueSizeThreshold = 0; private Path targetPath; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/ExpiredMobFileCleaner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/ExpiredMobFileCleaner.java index 3924ee6483d..053cba641f3 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/ExpiredMobFileCleaner.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/ExpiredMobFileCleaner.java @@ -20,8 +20,6 @@ package org.apache.hadoop.hbase.mob; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.fs.FileSystem; @@ -29,6 +27,8 @@ import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.TableName; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor; import org.apache.hadoop.hbase.client.Connection; @@ -47,7 +47,7 @@ import org.apache.hadoop.util.ToolRunner; @InterfaceAudience.Private public class ExpiredMobFileCleaner extends Configured implements Tool { - private static final Log LOG = LogFactory.getLog(ExpiredMobFileCleaner.class); + private static final Logger LOG = LoggerFactory.getLogger(ExpiredMobFileCleaner.class); /** * Cleans the MOB files when they're expired and their min versions are 0. 
* If the latest timestamp of Cells in a MOB file is older than the TTL in the column family, diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobFileCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobFileCache.java index 813de8ce11d..13caee600fd 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobFileCache.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobFileCache.java @@ -31,12 +31,12 @@ import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.LongAdder; import java.util.concurrent.locks.ReentrantLock; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.util.IdLock; import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder; @@ -50,7 +50,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFa @InterfaceAudience.Private public class MobFileCache { - private static final Log LOG = LogFactory.getLog(MobFileCache.class); + private static final Logger LOG = LoggerFactory.getLogger(MobFileCache.class); /* * Eviction and statistics thread. Periodically run to print the statistics and diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobUtils.java index b68e4ffc77a..9fa4e4cf833 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobUtils.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobUtils.java @@ -36,8 +36,6 @@ import java.util.concurrent.SynchronousQueue; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -79,6 +77,8 @@ import org.apache.hadoop.hbase.util.FSUtils; import org.apache.hadoop.hbase.util.ReflectionUtils; import org.apache.hadoop.hbase.util.Threads; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * The mob utilities @@ -86,7 +86,7 @@ import org.apache.yetus.audience.InterfaceAudience; @InterfaceAudience.Private public final class MobUtils { - private static final Log LOG = LogFactory.getLog(MobUtils.class); + private static final Logger LOG = LoggerFactory.getLogger(MobUtils.class); private final static long WEEKLY_THRESHOLD_MULTIPLIER = 7; private final static long MONTHLY_THRESHOLD_MULTIPLIER = 4 * WEEKLY_THRESHOLD_MULTIPLIER; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/compactions/PartitionedMobCompactor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/compactions/PartitionedMobCompactor.java index cf661db5113..9c234705949 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/compactions/PartitionedMobCompactor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/compactions/PartitionedMobCompactor.java @@ -35,13 +35,12 @@ import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.NavigableMap; +import java.util.Objects; import java.util.TreeMap; import java.util.concurrent.Callable; import 
java.util.concurrent.ExecutorService; import java.util.concurrent.Future; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -49,7 +48,6 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; -import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.Tag; @@ -90,6 +88,8 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.Pair; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * An implementation of {@link MobCompactor} that compacts the mob files in partitions. @@ -97,7 +97,7 @@ import org.apache.yetus.audience.InterfaceAudience; @InterfaceAudience.Private public class PartitionedMobCompactor extends MobCompactor { - private static final Log LOG = LogFactory.getLog(PartitionedMobCompactor.class); + private static final Logger LOG = LoggerFactory.getLogger(PartitionedMobCompactor.class); protected long mergeableSize; protected int delFileMaxCount; /** The number of files compacted in a batch */ @@ -362,7 +362,7 @@ public class PartitionedMobCompactor extends MobCompactor { LOG.info( "After a mob compaction with all files selected, archiving the del files "); for (CompactionDelPartition delPartition : request.getDelPartitions()) { - LOG.info(delPartition.listDelFiles()); + LOG.info(Objects.toString(delPartition.listDelFiles())); try { MobUtils.removeMobFiles(conf, fs, tableName, mobTableDir, column.getName(), delPartition.getStoreFiles()); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/TaskMonitor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/TaskMonitor.java index 837ddf007d3..61d00075619 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/TaskMonitor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/TaskMonitor.java @@ -28,11 +28,11 @@ import java.util.Iterator; import java.util.List; import org.apache.commons.collections4.queue.CircularFifoQueue; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.Threads; @@ -45,7 +45,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; */ @InterfaceAudience.Private public class TaskMonitor { - private static final Log LOG = LogFactory.getLog(TaskMonitor.class); + private static final Logger LOG = LoggerFactory.getLogger(TaskMonitor.class); public static final String MAX_TASKS_KEY = "hbase.taskmonitor.max.tasks"; public static final int DEFAULT_MAX_TASKS = 1000; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/namespace/NamespaceAuditor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/namespace/NamespaceAuditor.java index efe2c1e9579..0a74b093a8d 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/namespace/NamespaceAuditor.java +++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/namespace/NamespaceAuditor.java @@ -19,8 +19,6 @@ package org.apache.hadoop.hbase.namespace; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HBaseIOException; import org.apache.hadoop.hbase.MetaTableAccessor; import org.apache.hadoop.hbase.NamespaceDescriptor; @@ -30,6 +28,8 @@ import org.apache.hadoop.hbase.client.RegionInfo; import org.apache.hadoop.hbase.master.MasterServices; import org.apache.hadoop.hbase.quotas.QuotaExceededException; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * The Class NamespaceAuditor performs checks to ensure operations like table creation @@ -38,7 +38,7 @@ import org.apache.yetus.audience.InterfaceAudience; */ @InterfaceAudience.Private public class NamespaceAuditor { - private static final Log LOG = LogFactory.getLog(NamespaceAuditor.class); + private static final Logger LOG = LoggerFactory.getLogger(NamespaceAuditor.class); private NamespaceStateManager stateManager; private MasterServices masterServices; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/namespace/NamespaceStateManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/namespace/NamespaceStateManager.java index c62594adc86..3cf5a257233 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/namespace/NamespaceStateManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/namespace/NamespaceStateManager.java @@ -22,8 +22,6 @@ import java.util.List; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.MetaTableAccessor; import org.apache.hadoop.hbase.NamespaceDescriptor; import org.apache.hadoop.hbase.TableName; @@ -33,6 +31,8 @@ import org.apache.hadoop.hbase.master.TableNamespaceManager; import org.apache.hadoop.hbase.quotas.QuotaExceededException; import org.apache.hadoop.hbase.util.Bytes; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * NamespaceStateManager manages state (in terms of quota) of all the namespaces. 
It contains @@ -41,7 +41,7 @@ import org.apache.yetus.audience.InterfaceAudience; @InterfaceAudience.Private class NamespaceStateManager { - private static final Log LOG = LogFactory.getLog(NamespaceStateManager.class); + private static final Logger LOG = LoggerFactory.getLogger(NamespaceStateManager.class); private ConcurrentMap nsStateCache; private MasterServices master; private volatile boolean initialized = false; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/Procedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/Procedure.java index 1e12304b760..15e882b81dd 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/Procedure.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/Procedure.java @@ -25,9 +25,9 @@ import java.util.concurrent.Callable; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.errorhandling.ForeignException; import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher; import org.apache.hadoop.hbase.errorhandling.ForeignExceptionListener; @@ -67,7 +67,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; */ @InterfaceAudience.Private public class Procedure implements Callable, ForeignExceptionListener { - private static final Log LOG = LogFactory.getLog(Procedure.class); + private static final Logger LOG = LoggerFactory.getLogger(Procedure.class); // // Arguments and naming diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ProcedureCoordinator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ProcedureCoordinator.java index 2f67d41bee5..c9f068b6325 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ProcedureCoordinator.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ProcedureCoordinator.java @@ -29,9 +29,9 @@ import java.util.concurrent.SynchronousQueue; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.DaemonThreadFactory; import org.apache.hadoop.hbase.errorhandling.ForeignException; import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher; @@ -46,7 +46,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.MapMaker; */ @InterfaceAudience.Private public class ProcedureCoordinator { - private static final Log LOG = LogFactory.getLog(ProcedureCoordinator.class); + private static final Logger LOG = LoggerFactory.getLogger(ProcedureCoordinator.class); final static long KEEP_ALIVE_MILLIS_DEFAULT = 5000; final static long TIMEOUT_MILLIS_DEFAULT = 60000; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ProcedureManagerHost.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ProcedureManagerHost.java index 09a29720307..af4d2d7104b 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ProcedureManagerHost.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ProcedureManagerHost.java @@ -23,11 +23,11 @@ import java.util.HashSet; import 
java.util.Set; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceStability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Provides the common setup framework and runtime services for globally @@ -44,7 +44,7 @@ public abstract class ProcedureManagerHost { public static final String MASTER_PROCEDURE_CONF_KEY = "hbase.procedure.master.classes"; - private static final Log LOG = LogFactory.getLog(ProcedureManagerHost.class); + private static final Logger LOG = LoggerFactory.getLogger(ProcedureManagerHost.class); protected Set procedures = new HashSet<>(); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ProcedureMember.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ProcedureMember.java index c8399ba4bd8..86923ae16ef 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ProcedureMember.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ProcedureMember.java @@ -26,9 +26,9 @@ import java.util.concurrent.SynchronousQueue; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.DaemonThreadFactory; import org.apache.hadoop.hbase.errorhandling.ForeignException; @@ -41,7 +41,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.MapMaker; */ @InterfaceAudience.Private public class ProcedureMember implements Closeable { - private static final Log LOG = LogFactory.getLog(ProcedureMember.class); + private static final Logger LOG = LoggerFactory.getLogger(ProcedureMember.class); final static long KEEP_ALIVE_MILLIS_DEFAULT = 5000; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/RegionServerProcedureManagerHost.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/RegionServerProcedureManagerHost.java index 0f4ea645869..5cb2529f516 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/RegionServerProcedureManagerHost.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/RegionServerProcedureManagerHost.java @@ -19,13 +19,13 @@ package org.apache.hadoop.hbase.procedure; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.procedure.flush.RegionServerFlushTableProcedureManager; import org.apache.hadoop.hbase.regionserver.RegionServerServices; import org.apache.hadoop.hbase.regionserver.snapshot.RegionServerSnapshotManager; import org.apache.zookeeper.KeeperException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Provides the globally barriered procedure framework and environment @@ -36,8 +36,8 @@ import org.apache.zookeeper.KeeperException; public class RegionServerProcedureManagerHost extends ProcedureManagerHost { - private static final Log LOG = LogFactory - .getLog(RegionServerProcedureManagerHost.class); + private static final Logger LOG = LoggerFactory + .getLogger(RegionServerProcedureManagerHost.class); public void initialize(RegionServerServices rss) throws KeeperException { for 
(RegionServerProcedureManager proc : procedures) { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/Subprocedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/Subprocedure.java index 892733828e7..6416e6a65b9 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/Subprocedure.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/Subprocedure.java @@ -21,14 +21,14 @@ import java.io.IOException; import java.util.concurrent.Callable; import java.util.concurrent.CountDownLatch; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.errorhandling.ForeignException; import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher; import org.apache.hadoop.hbase.errorhandling.ForeignExceptionListener; import org.apache.hadoop.hbase.errorhandling.ForeignExceptionSnare; import org.apache.hadoop.hbase.errorhandling.TimeoutExceptionInjector; import org.apache.zookeeper.KeeperException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Distributed procedure member's Subprocedure. A procedure is sarted on a ProcedureCoordinator @@ -51,7 +51,7 @@ import org.apache.zookeeper.KeeperException; * barrierName. (ex: snapshot121126). */ abstract public class Subprocedure implements Callable { - private static final Log LOG = LogFactory.getLog(Subprocedure.class); + private static final Logger LOG = LoggerFactory.getLogger(Subprocedure.class); // Name of the procedure final private String barrierName; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureCoordinator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureCoordinator.java index 609ce8ee3b5..c1fb8f5c1a3 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureCoordinator.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureCoordinator.java @@ -22,8 +22,6 @@ import java.io.InterruptedIOException; import java.util.Arrays; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.zookeeper.ZKWatcher; import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.errorhandling.ForeignException; @@ -31,13 +29,15 @@ import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.zookeeper.ZKUtil; import org.apache.hadoop.hbase.zookeeper.ZNodePaths; import org.apache.zookeeper.KeeperException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * ZooKeeper based {@link ProcedureCoordinatorRpcs} for a {@link ProcedureCoordinator} */ @InterfaceAudience.Private public class ZKProcedureCoordinator implements ProcedureCoordinatorRpcs { - private static final Log LOG = LogFactory.getLog(ZKProcedureCoordinator.class); + private static final Logger LOG = LoggerFactory.getLogger(ZKProcedureCoordinator.class); private ZKProcedureUtil zkProc = null; protected ProcedureCoordinator coordinator = null; // if started this should be non-null diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureMemberRpcs.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureMemberRpcs.java index 45e67609679..ea41ae89726 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureMemberRpcs.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureMemberRpcs.java @@ 
-21,8 +21,6 @@ import java.io.IOException; import java.util.Arrays; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.zookeeper.ZKWatcher; import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.errorhandling.ForeignException; @@ -31,6 +29,8 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.zookeeper.ZKUtil; import org.apache.hadoop.hbase.zookeeper.ZNodePaths; import org.apache.zookeeper.KeeperException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * ZooKeeper based controller for a procedure member. @@ -52,7 +52,7 @@ import org.apache.zookeeper.KeeperException; */ @InterfaceAudience.Private public class ZKProcedureMemberRpcs implements ProcedureMemberRpcs { - private static final Log LOG = LogFactory.getLog(ZKProcedureMemberRpcs.class); + private static final Logger LOG = LoggerFactory.getLogger(ZKProcedureMemberRpcs.class); private final ZKProcedureUtil zkController; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureUtil.java index 0349290eabb..976e36b49b7 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureUtil.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureUtil.java @@ -21,14 +21,14 @@ import java.io.Closeable; import java.io.IOException; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.zookeeper.ZKListener; import org.apache.hadoop.hbase.zookeeper.ZKWatcher; import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.zookeeper.ZKUtil; import org.apache.hadoop.hbase.zookeeper.ZNodePaths; import org.apache.zookeeper.KeeperException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This is a shared ZooKeeper-based znode management utils for distributed procedure. 
All znode @@ -52,7 +52,7 @@ import org.apache.zookeeper.KeeperException; public abstract class ZKProcedureUtil extends ZKListener implements Closeable { - private static final Log LOG = LogFactory.getLog(ZKProcedureUtil.class); + private static final Logger LOG = LoggerFactory.getLogger(ZKProcedureUtil.class); public static final String ACQUIRED_BARRIER_ZNODE_DEFAULT = "acquired"; public static final String REACHED_BARRIER_ZNODE_DEFAULT = "reached"; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/flush/FlushTableSubprocedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/flush/FlushTableSubprocedure.java index 1b4c561c237..5c005a75a9a 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/flush/FlushTableSubprocedure.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/flush/FlushTableSubprocedure.java @@ -20,9 +20,9 @@ package org.apache.hadoop.hbase.procedure.flush; import java.util.List; import java.util.concurrent.Callable; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.errorhandling.ForeignException; import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher; import org.apache.hadoop.hbase.procedure.ProcedureMember; @@ -37,7 +37,7 @@ import org.apache.hadoop.hbase.regionserver.HRegion; */ @InterfaceAudience.Private public class FlushTableSubprocedure extends Subprocedure { - private static final Log LOG = LogFactory.getLog(FlushTableSubprocedure.class); + private static final Logger LOG = LoggerFactory.getLogger(FlushTableSubprocedure.class); private final String table; private final List regions; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/flush/MasterFlushTableProcedureManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/flush/MasterFlushTableProcedureManager.java index 66f9240f7b3..55d73d87e70 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/flush/MasterFlushTableProcedureManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/flush/MasterFlushTableProcedureManager.java @@ -25,8 +25,6 @@ import java.util.Map; import java.util.Set; import java.util.concurrent.ThreadPoolExecutor; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.MetaTableAccessor; @@ -47,7 +45,8 @@ import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.hbase.zookeeper.MetaTableLocator; import org.apache.yetus.audience.InterfaceAudience; import org.apache.zookeeper.KeeperException; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription; @@ -65,7 +64,7 @@ public class MasterFlushTableProcedureManager extends MasterProcedureManager { "hbase.flush.procedure.master.threads"; private static final int FLUSH_PROC_POOL_THREADS_DEFAULT = 1; - private static final Log LOG = LogFactory.getLog(MasterFlushTableProcedureManager.class); + private static final Logger LOG = LoggerFactory.getLogger(MasterFlushTableProcedureManager.class); private MasterServices master; private ProcedureCoordinator coordinator; diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/flush/RegionServerFlushTableProcedureManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/flush/RegionServerFlushTableProcedureManager.java index d328561ce20..bf55c0c885c 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/flush/RegionServerFlushTableProcedureManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/flush/RegionServerFlushTableProcedureManager.java @@ -29,8 +29,6 @@ import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Abortable; @@ -51,13 +49,16 @@ import org.apache.hadoop.hbase.regionserver.HRegionServer; import org.apache.hadoop.hbase.regionserver.RegionServerServices; import org.apache.hadoop.hbase.zookeeper.ZKWatcher; import org.apache.zookeeper.KeeperException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This manager class handles flushing of the regions for table on a {@link HRegionServer}. */ @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG) public class RegionServerFlushTableProcedureManager extends RegionServerProcedureManager { - private static final Log LOG = LogFactory.getLog(RegionServerFlushTableProcedureManager.class); + private static final Logger LOG = + LoggerFactory.getLogger(RegionServerFlushTableProcedureManager.class); private static final String CONCURENT_FLUSH_TASKS_KEY = "hbase.flush.procedure.region.concurrentTasks"; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/DefaultOperationQuota.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/DefaultOperationQuota.java index 38fc488e487..a15aeb6852c 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/DefaultOperationQuota.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/DefaultOperationQuota.java @@ -21,17 +21,17 @@ package org.apache.hadoop.hbase.quotas; import java.util.Arrays; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceStability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.client.Mutation; import org.apache.hadoop.hbase.client.Result; @InterfaceAudience.Private @InterfaceStability.Evolving public class DefaultOperationQuota implements OperationQuota { - private static final Log LOG = LogFactory.getLog(DefaultOperationQuota.class); + private static final Logger LOG = LoggerFactory.getLogger(DefaultOperationQuota.class); private final List limiters; private long writeAvailable = 0; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/FileSystemUtilizationChore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/FileSystemUtilizationChore.java index a76e9c1937f..eded0764722 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/FileSystemUtilizationChore.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/FileSystemUtilizationChore.java @@ -23,8 +23,6 @@ import java.util.Map; import java.util.Set; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import 
org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.ScheduledChore; import org.apache.hadoop.hbase.client.RegionInfo; @@ -34,13 +32,15 @@ import org.apache.hadoop.hbase.regionserver.Region; import org.apache.hadoop.hbase.regionserver.Store; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A chore which computes the size of each {@link HRegion} on the FileSystem hosted by the given {@link HRegionServer}. */ @InterfaceAudience.Private public class FileSystemUtilizationChore extends ScheduledChore { - private static final Log LOG = LogFactory.getLog(FileSystemUtilizationChore.class); + private static final Logger LOG = LoggerFactory.getLogger(FileSystemUtilizationChore.class); static final String FS_UTILIZATION_CHORE_PERIOD_KEY = "hbase.regionserver.quotas.fs.utilization.chore.period"; static final int FS_UTILIZATION_CHORE_PERIOD_DEFAULT = 1000 * 60 * 5; // 5 minutes in millis diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/MasterQuotaManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/MasterQuotaManager.java index e4fa3eaa1d1..79be1ac6bc5 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/MasterQuotaManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/MasterQuotaManager.java @@ -28,8 +28,6 @@ import java.util.Map.Entry; import java.util.concurrent.ConcurrentHashMap; import org.apache.commons.lang3.builder.HashCodeBuilder; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.MetaTableAccessor; import org.apache.hadoop.hbase.NamespaceDescriptor; @@ -41,7 +39,8 @@ import org.apache.hadoop.hbase.namespace.NamespaceAuditor; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceStability; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.protobuf.TextFormat; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; @@ -59,7 +58,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaRe @InterfaceAudience.Private @InterfaceStability.Evolving public class MasterQuotaManager implements RegionStateListener { - private static final Log LOG = LogFactory.getLog(MasterQuotaManager.class); + private static final Logger LOG = LoggerFactory.getLogger(MasterQuotaManager.class); private static final Map EMPTY_MAP = Collections.unmodifiableMap( new HashMap<>()); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaCache.java index 6a5e38c5e5e..adabdac6a86 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaCache.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaCache.java @@ -30,14 +30,14 @@ import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.ScheduledChore; import org.apache.hadoop.hbase.Stoppable; import 
org.apache.hadoop.hbase.TableName; import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceStability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.regionserver.RegionServerServices; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; @@ -57,7 +57,7 @@ import org.apache.hadoop.security.UserGroupInformation; @InterfaceAudience.Private @InterfaceStability.Evolving public class QuotaCache implements Stoppable { - private static final Log LOG = LogFactory.getLog(QuotaCache.class); + private static final Logger LOG = LoggerFactory.getLogger(QuotaCache.class); public static final String REFRESH_CONF_KEY = "hbase.quota.refresh.period"; private static final int REFRESH_DEFAULT_PERIOD = 5 * 60000; // 5min diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaObserverChore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaObserverChore.java index bfbda35cbc9..39048d79eee 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaObserverChore.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaObserverChore.java @@ -26,8 +26,6 @@ import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.ScheduledChore; import org.apache.hadoop.hbase.Stoppable; @@ -40,7 +38,8 @@ import org.apache.hadoop.hbase.master.MetricsMaster; import org.apache.hadoop.hbase.quotas.SpaceQuotaSnapshot.SpaceQuotaStatus; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.common.collect.HashMultimap; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterables; @@ -53,7 +52,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota; */ @InterfaceAudience.Private public class QuotaObserverChore extends ScheduledChore { - private static final Log LOG = LogFactory.getLog(QuotaObserverChore.class); + private static final Logger LOG = LoggerFactory.getLogger(QuotaObserverChore.class); static final String QUOTA_OBSERVER_CHORE_PERIOD_KEY = "hbase.master.quotas.observer.chore.period"; static final int QUOTA_OBSERVER_CHORE_PERIOD_DEFAULT = 1000 * 60 * 1; // 1 minutes in millis diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaUtil.java index f3a745ca3f8..6bc3ce9d479 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaUtil.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaUtil.java @@ -23,8 +23,6 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.HColumnDescriptor; @@ -34,6 +32,8 @@ import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.TableName; import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceStability; 
+import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.Get; @@ -52,7 +52,7 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; @InterfaceAudience.Private @InterfaceStability.Evolving public class QuotaUtil extends QuotaTableUtil { - private static final Log LOG = LogFactory.getLog(QuotaUtil.class); + private static final Logger LOG = LoggerFactory.getLogger(QuotaUtil.class); public static final String QUOTA_CONF_KEY = "hbase.quota.enabled"; private static final boolean QUOTA_ENABLED_DEFAULT = false; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/RegionServerRpcQuotaManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/RegionServerRpcQuotaManager.java index 51b8cc950b2..62e06146b36 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/RegionServerRpcQuotaManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/RegionServerRpcQuotaManager.java @@ -22,10 +22,10 @@ import java.io.IOException; import java.util.List; import java.util.Optional; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceStability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.ipc.RpcScheduler; import org.apache.hadoop.hbase.ipc.RpcServer; @@ -47,7 +47,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe @InterfaceAudience.Private @InterfaceStability.Evolving public class RegionServerRpcQuotaManager { - private static final Log LOG = LogFactory.getLog(RegionServerRpcQuotaManager.class); + private static final Logger LOG = LoggerFactory.getLogger(RegionServerRpcQuotaManager.class); private final RegionServerServices rsServices; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/RegionServerSpaceQuotaManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/RegionServerSpaceQuotaManager.java index 80bbdc36261..0a998dc4694 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/RegionServerSpaceQuotaManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/RegionServerSpaceQuotaManager.java @@ -24,10 +24,10 @@ import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicReference; import java.util.Map.Entry; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.TableName; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.quotas.SpaceQuotaSnapshot.SpaceQuotaStatus; import org.apache.hadoop.hbase.regionserver.RegionServerServices; @@ -46,7 +46,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe */ @InterfaceAudience.Private public class RegionServerSpaceQuotaManager { - private static final Log LOG = LogFactory.getLog(RegionServerSpaceQuotaManager.class); + private static final Logger LOG = LoggerFactory.getLogger(RegionServerSpaceQuotaManager.class); private final RegionServerServices rsServices; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/SnapshotQuotaObserverChore.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/SnapshotQuotaObserverChore.java index fc8f0ba525b..78bbf755e8f 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/SnapshotQuotaObserverChore.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/SnapshotQuotaObserverChore.java @@ -32,8 +32,6 @@ import java.util.function.Predicate; import java.util.stream.Collectors; import org.apache.commons.lang3.builder.HashCodeBuilder; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -43,6 +41,8 @@ import org.apache.hadoop.hbase.ScheduledChore; import org.apache.hadoop.hbase.Stoppable; import org.apache.hadoop.hbase.TableName; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.Table; @@ -58,8 +58,8 @@ import org.apache.hadoop.hbase.util.FSUtils; import org.apache.hadoop.hbase.util.HFileArchiveUtil; import org.apache.hadoop.util.StringUtils; -import com.google.common.collect.HashMultimap; -import com.google.common.collect.Multimap; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.HashMultimap; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Multimap; /** * A Master-invoked {@code Chore} that computes the size of each snapshot which was created from @@ -67,7 +67,7 @@ import com.google.common.collect.Multimap; */ @InterfaceAudience.Private public class SnapshotQuotaObserverChore extends ScheduledChore { - private static final Log LOG = LogFactory.getLog(SnapshotQuotaObserverChore.class); + private static final Logger LOG = LoggerFactory.getLogger(SnapshotQuotaObserverChore.class); static final String SNAPSHOT_QUOTA_CHORE_PERIOD_KEY = "hbase.master.quotas.snapshot.chore.period"; static final int SNAPSHOT_QUOTA_CHORE_PERIOD_DEFAULT = 1000 * 60 * 5; // 5 minutes in millis diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/SpaceLimitingException.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/SpaceLimitingException.java index bdacd334a8e..526f2e8602e 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/SpaceLimitingException.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/SpaceLimitingException.java @@ -17,9 +17,9 @@ package org.apache.hadoop.hbase.quotas; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * An Exception that is thrown when a space quota is in violation. 
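The quota classes above all receive the same mechanical substitution: the commons-logging Log/LogFactory pair becomes SLF4J's Logger/LoggerFactory. A minimal sketch of the before-and-after shape; the class name SomeQuotaChore is hypothetical and not part of this patch:

    // Before: commons-logging
    // import org.apache.commons.logging.Log;
    // import org.apache.commons.logging.LogFactory;
    // private static final Log LOG = LogFactory.getLog(SomeQuotaChore.class);

    // After: SLF4J, as applied throughout this patch
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class SomeQuotaChore {
      private static final Logger LOG = LoggerFactory.getLogger(SomeQuotaChore.class);

      void reportSize(String table, long bytes) {
        // SLF4J also supports parameterized messages ({} placeholders), which avoid
        // building the string when the level is disabled; the patch itself largely
        // keeps the existing concatenation style unchanged.
        LOG.debug("Computed size of {}: {} bytes", table, bytes);
      }
    }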
@@ -27,7 +27,7 @@ import org.apache.yetus.audience.InterfaceAudience; @InterfaceAudience.Public public class SpaceLimitingException extends QuotaExceededException { private static final long serialVersionUID = 2319438922387583600L; - private static final Log LOG = LogFactory.getLog(SpaceLimitingException.class); + private static final Logger LOG = LoggerFactory.getLogger(SpaceLimitingException.class); private static final String MESSAGE_PREFIX = SpaceLimitingException.class.getName() + ": "; private final String policyName; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/SpaceQuotaRefresherChore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/SpaceQuotaRefresherChore.java index e86e9ceecbb..d3be620877b 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/SpaceQuotaRefresherChore.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/SpaceQuotaRefresherChore.java @@ -22,12 +22,12 @@ import java.util.Map; import java.util.Map.Entry; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.ScheduledChore; import org.apache.hadoop.hbase.TableName; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; @@ -40,7 +40,7 @@ import org.apache.hadoop.hbase.util.Bytes; */ @InterfaceAudience.Private public class SpaceQuotaRefresherChore extends ScheduledChore { - private static final Log LOG = LogFactory.getLog(SpaceQuotaRefresherChore.class); + private static final Logger LOG = LoggerFactory.getLogger(SpaceQuotaRefresherChore.class); static final String POLICY_REFRESHER_CHORE_PERIOD_KEY = "hbase.regionserver.quotas.policy.refresher.chore.period"; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/TableQuotaSnapshotStore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/TableQuotaSnapshotStore.java index dfaabec0260..664e26848a5 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/TableQuotaSnapshotStore.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/TableQuotaSnapshotStore.java @@ -24,8 +24,6 @@ import java.util.concurrent.locks.ReentrantReadWriteLock; import java.util.concurrent.locks.ReentrantReadWriteLock.ReadLock; import java.util.concurrent.locks.ReentrantReadWriteLock.WriteLock; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellScanner; import org.apache.hadoop.hbase.TableName; @@ -37,7 +35,8 @@ import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.quotas.SpaceQuotaSnapshot.SpaceQuotaStatus; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.base.Predicate; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterables; import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException; @@ -50,7 +49,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota; */ @InterfaceAudience.Private public class TableQuotaSnapshotStore implements QuotaSnapshotStore { - 
private static final Log LOG = LogFactory.getLog(TableQuotaSnapshotStore.class); + private static final Logger LOG = LoggerFactory.getLogger(TableQuotaSnapshotStore.class); private final ReentrantReadWriteLock lock = new ReentrantReadWriteLock(); private final ReadLock rlock = lock.readLock(); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/TableSpaceQuotaSnapshotNotifier.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/TableSpaceQuotaSnapshotNotifier.java index f9813e55f43..d81d7d304a7 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/TableSpaceQuotaSnapshotNotifier.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/TableSpaceQuotaSnapshotNotifier.java @@ -18,18 +18,18 @@ package org.apache.hadoop.hbase.quotas; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Table; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A {@link SpaceQuotaSnapshotNotifier} which uses the hbase:quota table. */ public class TableSpaceQuotaSnapshotNotifier implements SpaceQuotaSnapshotNotifier { - private static final Log LOG = LogFactory.getLog(TableSpaceQuotaSnapshotNotifier.class); + private static final Logger LOG = LoggerFactory.getLogger(TableSpaceQuotaSnapshotNotifier.class); private Connection conn; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/policies/DisableTableViolationPolicyEnforcement.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/policies/DisableTableViolationPolicyEnforcement.java index 806cc763f6f..c85ba21db9c 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/policies/DisableTableViolationPolicyEnforcement.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/policies/DisableTableViolationPolicyEnforcement.java @@ -18,11 +18,11 @@ package org.apache.hadoop.hbase.quotas.policies; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.TableNotDisabledException; import org.apache.hadoop.hbase.TableNotEnabledException; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.client.Mutation; import org.apache.hadoop.hbase.quotas.SpaceLimitingException; import org.apache.hadoop.hbase.quotas.SpaceViolationPolicy; @@ -34,7 +34,8 @@ import org.apache.hadoop.hbase.quotas.SpaceViolationPolicyEnforcement; */ @InterfaceAudience.Private public class DisableTableViolationPolicyEnforcement extends DefaultViolationPolicyEnforcement { - private static final Log LOG = LogFactory.getLog(DisableTableViolationPolicyEnforcement.class); + private static final Logger LOG = + LoggerFactory.getLogger(DisableTableViolationPolicyEnforcement.class); @Override public void enable() throws IOException { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/policies/NoWritesCompactionsViolationPolicyEnforcement.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/policies/NoWritesCompactionsViolationPolicyEnforcement.java index 5d5af2f38db..66dfee950bd 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/policies/NoWritesCompactionsViolationPolicyEnforcement.java +++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/policies/NoWritesCompactionsViolationPolicyEnforcement.java @@ -18,9 +18,9 @@ package org.apache.hadoop.hbase.quotas.policies; import java.util.concurrent.atomic.AtomicBoolean; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.quotas.SpaceViolationPolicy; import org.apache.hadoop.hbase.quotas.SpaceViolationPolicyEnforcement; @@ -31,7 +31,7 @@ import org.apache.hadoop.hbase.quotas.SpaceViolationPolicyEnforcement; @InterfaceAudience.Private public class NoWritesCompactionsViolationPolicyEnforcement extends NoWritesViolationPolicyEnforcement { - private static final Log LOG = LogFactory.getLog( + private static final Logger LOG = LoggerFactory.getLogger( NoWritesCompactionsViolationPolicyEnforcement.class); private AtomicBoolean disableCompactions = new AtomicBoolean(false); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/AbstractMemStore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/AbstractMemStore.java index 4614935cfc5..bf150a4c1d7 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/AbstractMemStore.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/AbstractMemStore.java @@ -24,12 +24,12 @@ import java.util.List; import java.util.NavigableSet; import java.util.SortedSet; -import org.apache.commons.logging.Log; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.ExtendedCell; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; import org.apache.hadoop.hbase.exceptions.UnexpectedStateException; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.ClassSize; @@ -193,7 +193,7 @@ public abstract class AbstractMemStore implements MemStore { return conf; } - protected void dump(Log log) { + protected void dump(Logger log) { active.dump(log); snapshot.dump(log); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/AbstractMultiFileWriter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/AbstractMultiFileWriter.java index 76840b70b41..2fdab813132 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/AbstractMultiFileWriter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/AbstractMultiFileWriter.java @@ -22,10 +22,10 @@ import java.util.ArrayList; import java.util.Collection; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.Path; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.regionserver.CellSink; /** @@ -34,7 +34,7 @@ import org.apache.hadoop.hbase.regionserver.CellSink; @InterfaceAudience.Private public abstract class AbstractMultiFileWriter implements CellSink, ShipperListener { - private static final Log LOG = LogFactory.getLog(AbstractMultiFileWriter.class); + private static final Logger LOG = LoggerFactory.getLogger(AbstractMultiFileWriter.class); /** Factory that is used to produce single StoreFile.Writer-s */ protected WriterFactory writerFactory; diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/AnnotationReadingPriorityFunction.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/AnnotationReadingPriorityFunction.java index ac0379bf963..4eb3419ad8c 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/AnnotationReadingPriorityFunction.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/AnnotationReadingPriorityFunction.java @@ -21,11 +21,11 @@ import java.lang.reflect.Method; import java.util.HashMap; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HConstants; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.ipc.PriorityFunction; import org.apache.hadoop.hbase.ipc.QosPriority; import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequest; @@ -67,8 +67,8 @@ import org.apache.hadoop.hbase.security.User; //to figure out whether it is a meta region or not. @InterfaceAudience.Private public class AnnotationReadingPriorityFunction implements PriorityFunction { - private static final Log LOG = - LogFactory.getLog(AnnotationReadingPriorityFunction.class.getName()); + private static final Logger LOG = + LoggerFactory.getLogger(AnnotationReadingPriorityFunction.class.getName()); /** Used to control the scan delay, currently sqrt(numNextCall * weight) */ public static final String SCAN_VTIME_WEIGHT_CONF_KEY = "hbase.ipc.server.scan.vtime.weight"; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/BusyRegionSplitPolicy.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/BusyRegionSplitPolicy.java index 0b251153dfc..d51d29441ee 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/BusyRegionSplitPolicy.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/BusyRegionSplitPolicy.java @@ -18,11 +18,11 @@ package org.apache.hadoop.hbase.regionserver; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; /** @@ -39,7 +39,7 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG) public class BusyRegionSplitPolicy extends IncreasingToUpperBoundRegionSplitPolicy { - private static final Log LOG = LogFactory.getLog(BusyRegionSplitPolicy.class); + private static final Logger LOG = LoggerFactory.getLogger(BusyRegionSplitPolicy.class); // Maximum fraction blocked write requests before region is considered for split private float maxBlockedRequests; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CellFlatMap.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CellFlatMap.java index fe510ae4570..17e64b0678b 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CellFlatMap.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CellFlatMap.java @@ -20,6 +20,8 @@ package org.apache.hadoop.hbase.regionserver; import org.apache.hadoop.hbase.Cell; import 
org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.util.Collection; import java.util.Comparator; @@ -28,8 +30,6 @@ import java.util.Map; import java.util.NavigableSet; import java.util.NavigableMap; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; /** @@ -44,7 +44,7 @@ import org.apache.commons.logging.LogFactory; */ @InterfaceAudience.Private public abstract class CellFlatMap implements NavigableMap { - private static final Log LOG = LogFactory.getLog(CellFlatMap.class); + private static final Logger LOG = LoggerFactory.getLogger(CellFlatMap.class); private final Comparator comparator; protected int minCellIdx = 0; // the index of the minimal cell (for sub-sets) protected int maxCellIdx = 0; // the index of the cell after the maximal cell (for sub-sets) diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ChunkCreator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ChunkCreator.java index 89317c9bce5..3ce7a46e8a4 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ChunkCreator.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ChunkCreator.java @@ -31,9 +31,9 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.LongAdder; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.regionserver.HeapMemoryManager.HeapMemoryTuneObserver; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.util.StringUtils; @@ -47,7 +47,7 @@ import com.google.common.util.concurrent.ThreadFactoryBuilder; */ @InterfaceAudience.Private public class ChunkCreator { - private static final Log LOG = LogFactory.getLog(ChunkCreator.class); + private static final Logger LOG = LoggerFactory.getLogger(ChunkCreator.class); // monotonically increasing chunkid private AtomicInteger chunkID = new AtomicInteger(1); // maps the chunk against the monotonically increasing chunk id. 
We need to preserve the diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactSplit.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactSplit.java index a8459da3953..e1435114258 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactSplit.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactSplit.java @@ -37,8 +37,6 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.IntSupplier; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.conf.ConfigurationManager; import org.apache.hadoop.hbase.conf.PropagatingConfigurationObserver; @@ -55,7 +53,8 @@ import org.apache.hadoop.hbase.util.StealJobQueue; import org.apache.hadoop.ipc.RemoteException; import org.apache.hadoop.util.StringUtils; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; @@ -64,7 +63,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; */ @InterfaceAudience.Private public class CompactSplit implements CompactionRequester, PropagatingConfigurationObserver { - private static final Log LOG = LogFactory.getLog(CompactSplit.class); + private static final Logger LOG = LoggerFactory.getLogger(CompactSplit.class); // Configuration key for the large compaction threads. public final static String LARGE_COMPACTION_THREADS = diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactedHFilesDischarger.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactedHFilesDischarger.java index 8a0dee67e83..3074dad53de 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactedHFilesDischarger.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactedHFilesDischarger.java @@ -19,14 +19,13 @@ package org.apache.hadoop.hbase.regionserver; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.ScheduledChore; import org.apache.hadoop.hbase.Server; import org.apache.hadoop.hbase.Stoppable; import org.apache.hadoop.hbase.executor.EventType; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** @@ -36,7 +35,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe */ @InterfaceAudience.Private public class CompactedHFilesDischarger extends ScheduledChore { - private static final Log LOG = LogFactory.getLog(CompactedHFilesDischarger.class); + private static final Logger LOG = LoggerFactory.getLogger(CompactedHFilesDischarger.class); private RegionServerServices regionServerServices; // Default is to use executor @VisibleForTesting diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactingMemStore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactingMemStore.java index f1232f84f24..7b885ff1afc 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactingMemStore.java +++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactingMemStore.java @@ -26,14 +26,14 @@ import java.util.List; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.atomic.AtomicBoolean; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.MemoryCompactionPolicy; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.ClassSize; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; @@ -69,7 +69,7 @@ public class CompactingMemStore extends AbstractMemStore { "hbase.memstore.inmemoryflush.threshold.factor"; private static final double IN_MEMORY_FLUSH_THRESHOLD_FACTOR_DEFAULT = 0.02; - private static final Log LOG = LogFactory.getLog(CompactingMemStore.class); + private static final Logger LOG = LoggerFactory.getLogger(CompactingMemStore.class); private HStore store; private RegionServicesForStores regionServices; private CompactionPipeline pipeline; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactionPipeline.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactionPipeline.java index 42931d067b0..68f99d37933 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactionPipeline.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactionPipeline.java @@ -23,9 +23,9 @@ import java.util.Iterator; import java.util.LinkedList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.ClassSize; @@ -54,7 +54,7 @@ import org.apache.hadoop.hbase.util.ClassSize; */ @InterfaceAudience.Private public class CompactionPipeline { - private static final Log LOG = LogFactory.getLog(CompactionPipeline.class); + private static final Logger LOG = LoggerFactory.getLogger(CompactionPipeline.class); public final static long FIXED_OVERHEAD = ClassSize .align(ClassSize.OBJECT + (3 * ClassSize.REFERENCE) + Bytes.SIZEOF_LONG); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompositeImmutableSegment.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompositeImmutableSegment.java index 93658193c1e..5e8a8b3f1b2 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompositeImmutableSegment.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompositeImmutableSegment.java @@ -24,12 +24,11 @@ import java.util.LinkedList; import java.util.List; import java.util.SortedSet; -import org.apache.commons.logging.Log; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.io.TimeRange; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** @@ -281,7 +280,7 @@ public class CompositeImmutableSegment extends ImmutableSegment { /** * Dumps all cells of the segment into the given log */ - void dump(Log log) 
{ + void dump(Logger log) { for (ImmutableSegment s : segments) { s.dump(log); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultHeapMemoryTuner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultHeapMemoryTuner.java index 13c344150ca..2ff7d58b827 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultHeapMemoryTuner.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultHeapMemoryTuner.java @@ -24,9 +24,9 @@ import static org.apache.hadoop.hbase.HConstants.HFILE_BLOCK_CACHE_SIZE_KEY; import static org.apache.hadoop.hbase.regionserver.HeapMemoryManager.MEMSTORE_SIZE_MAX_RANGE_KEY; import static org.apache.hadoop.hbase.regionserver.HeapMemoryManager.MEMSTORE_SIZE_MIN_RANGE_KEY; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.io.util.MemorySizeUtil; @@ -93,7 +93,7 @@ class DefaultHeapMemoryTuner implements HeapMemoryTuner { // NEUTRAL(given that last tuner period was also NEUTRAL). private static final double TUNER_STEP_EPS = 1e-6; - private Log LOG = LogFactory.getLog(DefaultHeapMemoryTuner.class); + private Logger LOG = LoggerFactory.getLogger(DefaultHeapMemoryTuner.class); private TunerResult TUNER_RESULT = new TunerResult(true); private Configuration conf; private float sufficientMemoryLevel = DEFAULT_SUFFICIENT_MEMORY_LEVEL_VALUE; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultMemStore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultMemStore.java index 0e0276a1f4a..061e4d07330 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultMemStore.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultMemStore.java @@ -24,8 +24,6 @@ import java.lang.management.RuntimeMXBean; import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; @@ -33,6 +31,8 @@ import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.ClassSize; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; @@ -55,7 +55,7 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; */ @InterfaceAudience.Private public class DefaultMemStore extends AbstractMemStore { - private static final Log LOG = LogFactory.getLog(DefaultMemStore.class); + private static final Logger LOG = LoggerFactory.getLogger(DefaultMemStore.class); public final static long DEEP_OVERHEAD = ClassSize.align(AbstractMemStore.DEEP_OVERHEAD); public final static long FIXED_OVERHEAD = ClassSize.align(AbstractMemStore.FIXED_OVERHEAD); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultStoreFileManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultStoreFileManager.java index e1f31bbe444..47d22b50d89 100644 --- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultStoreFileManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultStoreFileManager.java @@ -26,15 +26,14 @@ import java.util.List; import java.util.Optional; import java.util.stream.Collectors; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.regionserver.compactions.CompactionConfiguration; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableCollection; import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableList; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterables; @@ -44,7 +43,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterables; */ @InterfaceAudience.Private class DefaultStoreFileManager implements StoreFileManager { - private static final Log LOG = LogFactory.getLog(DefaultStoreFileManager.class); + private static final Logger LOG = LoggerFactory.getLogger(DefaultStoreFileManager.class); private final CellComparator cellComparator; private final CompactionConfiguration comConf; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultStoreFlusher.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultStoreFlusher.java index d666ba9e95b..b3f0a44dc8a 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultStoreFlusher.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultStoreFlusher.java @@ -22,9 +22,9 @@ import java.io.IOException; import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.monitoring.MonitoredTask; @@ -36,7 +36,7 @@ import org.apache.hadoop.util.StringUtils; */ @InterfaceAudience.Private public class DefaultStoreFlusher extends StoreFlusher { - private static final Log LOG = LogFactory.getLog(DefaultStoreFlusher.class); + private static final Logger LOG = LoggerFactory.getLogger(DefaultStoreFlusher.class); private final Object flushLock = new Object(); public DefaultStoreFlusher(Configuration conf, HStore store) { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DelimitedKeyPrefixRegionSplitPolicy.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DelimitedKeyPrefixRegionSplitPolicy.java index 51790140aac..483c155f83f 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DelimitedKeyPrefixRegionSplitPolicy.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DelimitedKeyPrefixRegionSplitPolicy.java @@ -20,9 +20,9 @@ package org.apache.hadoop.hbase.regionserver; import java.util.Arrays; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.util.Bytes; /** @@ -41,8 +41,8 @@ 
import org.apache.hadoop.hbase.util.Bytes; @InterfaceAudience.Private public class DelimitedKeyPrefixRegionSplitPolicy extends IncreasingToUpperBoundRegionSplitPolicy { - private static final Log LOG = LogFactory - .getLog(DelimitedKeyPrefixRegionSplitPolicy.class); + private static final Logger LOG = LoggerFactory + .getLogger(DelimitedKeyPrefixRegionSplitPolicy.class); public static final String DELIMITER_KEY = "DelimitedKeyPrefixRegionSplitPolicy.delimiter"; private byte[] delimiter = null; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/FlushAllLargeStoresPolicy.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/FlushAllLargeStoresPolicy.java index e4476d040b6..0f0117899f9 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/FlushAllLargeStoresPolicy.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/FlushAllLargeStoresPolicy.java @@ -21,10 +21,10 @@ import java.util.Collection; import java.util.HashSet; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A {@link FlushPolicy} that only flushes store larger a given threshold. If no store is large @@ -33,7 +33,7 @@ import org.apache.yetus.audience.InterfaceAudience; @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG) public class FlushAllLargeStoresPolicy extends FlushLargeStoresPolicy { - private static final Log LOG = LogFactory.getLog(FlushAllLargeStoresPolicy.class); + private static final Logger LOG = LoggerFactory.getLogger(FlushAllLargeStoresPolicy.class); @Override protected void configureForRegion(HRegion region) { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/FlushLargeStoresPolicy.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/FlushLargeStoresPolicy.java index 1610fd882f8..74bde60397f 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/FlushLargeStoresPolicy.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/FlushLargeStoresPolicy.java @@ -17,10 +17,10 @@ */ package org.apache.hadoop.hbase.regionserver; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A {@link FlushPolicy} that only flushes store larger a given threshold. 
If no store is large @@ -29,7 +29,7 @@ import org.apache.yetus.audience.InterfaceAudience; @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG) public abstract class FlushLargeStoresPolicy extends FlushPolicy { - private static final Log LOG = LogFactory.getLog(FlushLargeStoresPolicy.class); + private static final Logger LOG = LoggerFactory.getLogger(FlushLargeStoresPolicy.class); public static final String HREGION_COLUMNFAMILY_FLUSH_SIZE_LOWER_BOUND = "hbase.hregion.percolumnfamilyflush.size.lower.bound"; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/FlushPolicyFactory.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/FlushPolicyFactory.java index 2f273cab6c9..59f925fecf2 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/FlushPolicyFactory.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/FlushPolicyFactory.java @@ -19,12 +19,12 @@ package org.apache.hadoop.hbase.regionserver; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.client.TableDescriptor; import org.apache.hadoop.util.ReflectionUtils; @@ -37,7 +37,7 @@ import org.apache.hadoop.util.ReflectionUtils; @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG) public class FlushPolicyFactory { - private static final Log LOG = LogFactory.getLog(FlushPolicyFactory.class); + private static final Logger LOG = LoggerFactory.getLogger(FlushPolicyFactory.class); public static final String HBASE_FLUSH_POLICY_KEY = "hbase.regionserver.flush.policy"; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HMobStore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HMobStore.java index e6ca462d8e1..d56a1c2b2bc 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HMobStore.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HMobStore.java @@ -27,8 +27,6 @@ import java.util.NavigableSet; import java.util.UUID; import java.util.concurrent.ConcurrentHashMap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -59,6 +57,8 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.HFileArchiveUtil; import org.apache.hadoop.hbase.util.IdLock; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * The store implementation to save MOBs (medium objects), it extends the HStore. 
@@ -78,7 +78,7 @@ import org.apache.yetus.audience.InterfaceAudience; */ @InterfaceAudience.Private public class HMobStore extends HStore { - private static final Log LOG = LogFactory.getLog(HMobStore.class); + private static final Logger LOG = LoggerFactory.getLogger(HMobStore.class); private MobCacheConfig mobCacheConfig; private Path homePath; private Path mobFamilyPath; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java index 98e9df66c84..bb01fe84437 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java @@ -70,8 +70,6 @@ import java.util.concurrent.locks.ReadWriteLock; import java.util.concurrent.locks.ReentrantReadWriteLock; import java.util.function.Function; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -198,6 +196,8 @@ import org.apache.hadoop.io.MultipleIOException; import org.apache.hadoop.util.StringUtils; import org.apache.htrace.core.TraceScope; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import edu.umd.cs.findbugs.annotations.Nullable; @@ -219,7 +219,7 @@ import edu.umd.cs.findbugs.annotations.Nullable; @SuppressWarnings("deprecation") @InterfaceAudience.Private public class HRegion implements HeapSize, PropagatingConfigurationObserver, Region { - private static final Log LOG = LogFactory.getLog(HRegion.class); + private static final Logger LOG = LoggerFactory.getLogger(HRegion.class); public static final String LOAD_CFS_ON_DEMAND_CONFIG_KEY = "hbase.hregion.scan.loadColumnFamiliesOnDemand"; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.java index 4fc9ffe5cde..4788ac95c68 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.java @@ -28,8 +28,6 @@ import java.util.List; import java.util.Optional; import java.util.UUID; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; @@ -54,7 +52,8 @@ import org.apache.hadoop.hbase.util.FSUtils; import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import edu.umd.cs.findbugs.annotations.Nullable; @@ -65,7 +64,7 @@ import edu.umd.cs.findbugs.annotations.Nullable; */ @InterfaceAudience.Private public class HRegionFileSystem { - private static final Log LOG = LogFactory.getLog(HRegionFileSystem.class); + private static final Logger LOG = LoggerFactory.getLogger(HRegionFileSystem.class); /** Name of the region info file that resides just under the region directory. 
*/ public final static String REGION_INFO_FILE = ".regioninfo"; @@ -820,7 +819,7 @@ public class HRegionFileSystem { * @param LOG log to output information * @throws IOException if an unexpected exception occurs */ - void logFileSystemState(final Log LOG) throws IOException { + void logFileSystemState(final Logger LOG) throws IOException { FSUtils.logFileSystemState(fs, this.getRegionDir(), LOG); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java index 3a7680a99ac..9bb80133390 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java @@ -53,8 +53,6 @@ import java.util.function.Function; import org.apache.commons.lang3.RandomUtils; import org.apache.commons.lang3.SystemUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -113,6 +111,7 @@ import org.apache.hadoop.hbase.ipc.RpcServer; import org.apache.hadoop.hbase.ipc.RpcServerInterface; import org.apache.hadoop.hbase.ipc.ServerNotRunningYetException; import org.apache.hadoop.hbase.ipc.ServerRpcController; +import org.apache.hadoop.hbase.log.HBaseMarkers; import org.apache.hadoop.hbase.master.HMaster; import org.apache.hadoop.hbase.master.LoadBalancer; import org.apache.hadoop.hbase.master.RegionState.State; @@ -172,7 +171,8 @@ import org.apache.hadoop.util.ReflectionUtils; import org.apache.hadoop.util.StringUtils; import org.apache.yetus.audience.InterfaceAudience; import org.apache.zookeeper.KeeperException; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps; @@ -224,7 +224,7 @@ public class HRegionServer extends HasThread implements // Time to pause if master says 'please hold'. Make configurable if needed. private static final int INIT_PAUSE_TIME_MS = 1000; - private static final Log LOG = LogFactory.getLog(HRegionServer.class); + private static final Logger LOG = LoggerFactory.getLogger(HRegionServer.class); /** * For testing only! Set to true to skip notifying region assignment to master . 
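Besides the static logger fields, a few internal helpers take the logger as a parameter, so their signatures change from org.apache.commons.logging.Log to org.slf4j.Logger as well (AbstractMemStore.dump, CompositeImmutableSegment.dump and HRegionFileSystem.logFileSystemState above). A simplified sketch of that shape, with hypothetical class names and bodies:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    // Stand-in for a segment-like component whose dump() now accepts an SLF4J Logger.
    class SegmentSketch {
      void dump(Logger log) {
        log.debug("dumping segment contents");
      }
    }

    class MemStoreSketch {
      private static final Logger LOG = LoggerFactory.getLogger(MemStoreSketch.class);
      private final SegmentSketch active = new SegmentSketch();
      private final SegmentSketch snapshot = new SegmentSketch();

      // Mirrors AbstractMemStore.dump(Logger): the caller passes its own logger down.
      protected void dump(Logger log) {
        active.dump(log);
        snapshot.dump(log);
      }

      void debugDump() {
        dump(LOG);
      }
    }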
@@ -1663,7 +1663,7 @@ public class HRegionServer extends HasThread implements .setCurrentCompactedKVs(currentCompactedKVs) .setDataLocality(dataLocality) .setLastMajorCompactionTs(r.getOldestHfileTs(true)); - ((HRegion)r).setCompleteSequenceId(regionLoadBldr); + r.setCompleteSequenceId(regionLoadBldr); return regionLoadBldr.build(); } @@ -2198,7 +2198,7 @@ public class HRegionServer extends HasThread implements @Override public void postOpenDeployTasks(final PostOpenDeployContext context) throws KeeperException, IOException { - HRegion r = (HRegion) context.getRegion(); + HRegion r = context.getRegion(); long masterSystemTime = context.getMasterSystemTime(); rpcServices.checkOpen(); LOG.info("Post open deploy tasks for " + r.getRegionInfo().getRegionNameAsString()); @@ -2223,7 +2223,7 @@ public class HRegionServer extends HasThread implements + r.getRegionInfo().getRegionNameAsString()); } - triggerFlushInPrimaryRegion((HRegion)r); + triggerFlushInPrimaryRegion(r); LOG.debug("Finished post open deploy task for " + r.getRegionInfo().getRegionNameAsString()); } @@ -2373,15 +2373,15 @@ public class HRegionServer extends HasThread implements public void abort(String reason, Throwable cause) { String msg = "***** ABORTING region server " + this + ": " + reason + " *****"; if (cause != null) { - LOG.fatal(msg, cause); + LOG.error(HBaseMarkers.FATAL, msg, cause); } else { - LOG.fatal(msg); + LOG.error(HBaseMarkers.FATAL, msg); } this.abortRequested = true; // HBASE-4014: show list of coprocessors that were loaded to help debug // regionserver crashes.Note that we're implicitly using // java.util.HashSet's toString() method to print the coprocessor names. - LOG.fatal("RegionServer abort: loaded coprocessors are: " + + LOG.error(HBaseMarkers.FATAL, "RegionServer abort: loaded coprocessors are: " + CoprocessorHost.getLoadedCoprocessors()); // Try and dump metrics if abort -- might give clue as to how fatal came about.... 
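SLF4J has no fatal level, so the LOG.fatal(...) calls in abort() above are rewritten as LOG.error(HBaseMarkers.FATAL, ...), using the marker imported from org.apache.hadoop.hbase.log.HBaseMarkers earlier in this file's hunk. A sketch of the pattern; the definition of HBaseMarkers is assumed here (a plain MarkerFactory marker behaves this way), not taken from this patch:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;
    import org.slf4j.Marker;
    import org.slf4j.MarkerFactory;

    final class MarkersSketch {
      // Assumed stand-in for org.apache.hadoop.hbase.log.HBaseMarkers.FATAL
      static final Marker FATAL = MarkerFactory.getMarker("FATAL");
    }

    class AbortSketch {
      private static final Logger LOG = LoggerFactory.getLogger(AbortSketch.class);

      void abort(String reason, Throwable cause) {
        String msg = "***** ABORTING: " + reason + " *****";
        if (cause != null) {
          // Logged at ERROR but tagged with a FATAL marker so log configuration
          // and filters can still distinguish these messages.
          LOG.error(MarkersSketch.FATAL, msg, cause);
        } else {
          LOG.error(MarkersSketch.FATAL, msg);
        }
      }
    }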
try { @@ -2631,7 +2631,8 @@ public class HRegionServer extends HasThread implements } catch (ServiceException se) { IOException ioe = ProtobufUtil.getRemoteException(se); if (ioe instanceof ClockOutOfSyncException) { - LOG.fatal("Master rejected startup because clock is out of sync", ioe); + LOG.error(HBaseMarkers.FATAL, "Master rejected startup because clock is out of sync", + ioe); // Re-throw IOE will cause RS to abort throw ioe; } else if (ioe instanceof ServerNotRunningYetException) { @@ -3197,7 +3198,7 @@ public class HRegionServer extends HasThread implements Map> hstoreFiles = null; Exception exceptionToThrow = null; try{ - hstoreFiles = ((HRegion)regionToClose).close(false); + hstoreFiles = regionToClose.close(false); } catch (Exception e) { exceptionToThrow = e; } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServerCommandLine.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServerCommandLine.java index c2e1111e190..d3509c2dbd1 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServerCommandLine.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServerCommandLine.java @@ -18,11 +18,10 @@ */ package org.apache.hadoop.hbase.regionserver; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; - import org.apache.hadoop.hbase.trace.TraceUtil; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.LocalHBaseCluster; @@ -34,7 +33,7 @@ import org.apache.hadoop.hbase.util.ServerCommandLine; */ @InterfaceAudience.Private public class HRegionServerCommandLine extends ServerCommandLine { - private static final Log LOG = LogFactory.getLog(HRegionServerCommandLine.class); + private static final Logger LOG = LoggerFactory.getLogger(HRegionServerCommandLine.class); private final Class regionServerClass; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java index 80f91c8b68e..a5d4b4d2e3c 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java @@ -48,8 +48,7 @@ import java.util.function.Predicate; import java.util.function.ToLongFunction; import java.util.stream.Collectors; import java.util.stream.LongStream; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; + import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -77,6 +76,7 @@ import org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoder; import org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoderImpl; import org.apache.hadoop.hbase.io.hfile.HFileScanner; import org.apache.hadoop.hbase.io.hfile.InvalidHFileException; +import org.apache.hadoop.hbase.log.HBaseMarkers; import org.apache.hadoop.hbase.monitoring.MonitoredTask; import org.apache.hadoop.hbase.regionserver.compactions.CompactionContext; import org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker; @@ -98,7 +98,8 @@ import org.apache.hadoop.hbase.util.ReflectionUtils; import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix; import 
org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableCollection; @@ -133,7 +134,7 @@ public class HStore implements Store, HeapSize, StoreConfigInformation, Propagat public static final int DEFAULT_COMPACTCHECKER_INTERVAL_MULTIPLIER = 1000; public static final int DEFAULT_BLOCKING_STOREFILE_COUNT = 10; - private static final Log LOG = LogFactory.getLog(HStore.class); + private static final Logger LOG = LoggerFactory.getLogger(HStore.class); protected final MemStore memstore; // This stores directory in the filesystem. @@ -2221,7 +2222,8 @@ public class HStore implements Store, HeapSize, StoreConfigInformation, Propagat try { sf.deleteStoreFile(); } catch (IOException deleteEx) { - LOG.fatal("Failed to delete store file we committed, halting " + pathToDelete, ex); + LOG.error(HBaseMarkers.FATAL, "Failed to delete store file we committed, " + + "halting " + pathToDelete, ex); Runtime.getRuntime().halt(1); } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStoreFile.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStoreFile.java index b405c86aab2..93e59cfe1fa 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStoreFile.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStoreFile.java @@ -26,8 +26,6 @@ import java.util.OptionalLong; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -41,7 +39,8 @@ import org.apache.hadoop.hbase.io.hfile.HFile; import org.apache.hadoop.hbase.util.BloomFilterFactory; import org.apache.hadoop.hbase.util.Bytes; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** @@ -60,7 +59,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe @InterfaceAudience.Private public class HStoreFile implements StoreFile { - private static final Log LOG = LogFactory.getLog(HStoreFile.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(HStoreFile.class.getName()); public static final String STORE_FILE_READER_NO_READAHEAD = "hbase.store.reader.no-readahead"; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HeapMemoryManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HeapMemoryManager.java index cfdb32dd8d5..abd9b461336 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HeapMemoryManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HeapMemoryManager.java @@ -25,14 +25,14 @@ import java.util.ArrayList; import java.util.List; import java.util.concurrent.atomic.AtomicLong; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.ChoreService; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.ScheduledChore; 
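// Aside, illustration only: LoggerFactory offers both getLogger(Class<?>) and getLogger(String),
// so the getLogger(HStoreFile.class.getName()) form above and getLogger(HStoreFile.class) name
// the same logger (the fully qualified class name). The class below is made up for the sketch:
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

final class LoggerNamingSketch {
  // Both declarations resolve to a logger named after this class's fully qualified name.
  private static final Logger BY_CLASS = LoggerFactory.getLogger(LoggerNamingSketch.class);
  private static final Logger BY_NAME = LoggerFactory.getLogger(LoggerNamingSketch.class.getName());

  private LoggerNamingSketch() {
  }
}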
import org.apache.hadoop.hbase.Server; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.io.hfile.CacheConfig; import org.apache.hadoop.hbase.io.hfile.ResizableBlockCache; import org.apache.hadoop.hbase.io.util.MemorySizeUtil; @@ -47,7 +47,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe */ @InterfaceAudience.Private public class HeapMemoryManager { - private static final Log LOG = LogFactory.getLog(HeapMemoryManager.class); + private static final Logger LOG = LoggerFactory.getLogger(HeapMemoryManager.class); private static final int CONVERT_TO_PERCENTAGE = 100; private static final int CLUSTER_MINIMUM_MEMORY_THRESHOLD = (int) (CONVERT_TO_PERCENTAGE * HConstants.HBASE_CLUSTER_MINIMUM_MEMORY_THRESHOLD); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/IncreasingToUpperBoundRegionSplitPolicy.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/IncreasingToUpperBoundRegionSplitPolicy.java index 3164e1c83f0..19a63b45d22 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/IncreasingToUpperBoundRegionSplitPolicy.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/IncreasingToUpperBoundRegionSplitPolicy.java @@ -20,12 +20,12 @@ package org.apache.hadoop.hbase.regionserver; import java.io.IOException; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.TableName; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.client.TableDescriptor; import org.apache.hadoop.hbase.client.TableDescriptorBuilder; import org.apache.hadoop.hbase.procedure2.util.StringUtils; @@ -45,8 +45,9 @@ import org.apache.hadoop.hbase.procedure2.util.StringUtils; */ @InterfaceAudience.Private public class IncreasingToUpperBoundRegionSplitPolicy extends ConstantSizeRegionSplitPolicy { + private static final Logger LOG = + LoggerFactory.getLogger(IncreasingToUpperBoundRegionSplitPolicy.class); - private static final Log LOG = LogFactory.getLog(IncreasingToUpperBoundRegionSplitPolicy.class); protected long initialSize; @Override diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyPrefixRegionSplitPolicy.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyPrefixRegionSplitPolicy.java index 634bd88e0c0..660da57080d 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyPrefixRegionSplitPolicy.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyPrefixRegionSplitPolicy.java @@ -19,9 +19,9 @@ package org.apache.hadoop.hbase.regionserver; import java.util.Arrays; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A custom RegionSplitPolicy implementing a SplitPolicy that groups @@ -32,8 +32,8 @@ import org.apache.yetus.audience.InterfaceAudience; */ @InterfaceAudience.Private public class KeyPrefixRegionSplitPolicy extends IncreasingToUpperBoundRegionSplitPolicy { - private static final Log LOG = LogFactory - .getLog(KeyPrefixRegionSplitPolicy.class); + private static final Logger LOG = LoggerFactory + 
.getLogger(KeyPrefixRegionSplitPolicy.class); @Deprecated public static final String PREFIX_LENGTH_KEY_DEPRECATED = "prefix_split_key_policy.prefix_length"; public static final String PREFIX_LENGTH_KEY = "KeyPrefixRegionSplitPolicy.prefix_length"; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueHeap.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueHeap.java index f26575d6c39..cdd92a6e108 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueHeap.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueHeap.java @@ -27,11 +27,11 @@ import java.util.Comparator; import java.util.List; import java.util.PriorityQueue; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.regionserver.ScannerContext.NextState; /** @@ -49,7 +49,7 @@ import org.apache.hadoop.hbase.regionserver.ScannerContext.NextState; @InterfaceAudience.Private public class KeyValueHeap extends NonReversedNonLazyKeyValueScanner implements KeyValueScanner, InternalScanner { - private static final Log LOG = LogFactory.getLog(KeyValueHeap.class); + private static final Logger LOG = LoggerFactory.getLogger(KeyValueHeap.class); protected PriorityQueue heap = null; // Holds the scanners when a ever a eager close() happens. All such eagerly closed // scans are collected and when the final scanner.close() happens will perform the diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Leases.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Leases.java index a91a27156ab..f7ee4ef32e9 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Leases.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Leases.java @@ -18,12 +18,7 @@ */ package org.apache.hadoop.hbase.regionserver; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.yetus.audience.InterfaceAudience; -import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; -import org.apache.hadoop.hbase.util.HasThread; - +import java.io.IOException; import java.util.ConcurrentModificationException; import java.util.Iterator; import java.util.Map; @@ -31,7 +26,12 @@ import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.Delayed; import java.util.concurrent.TimeUnit; -import java.io.IOException; +import org.apache.hadoop.hbase.log.HBaseMarkers; +import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; +import org.apache.hadoop.hbase.util.HasThread; +import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Leases @@ -54,7 +54,7 @@ import java.io.IOException; */ @InterfaceAudience.Private public class Leases extends HasThread { - private static final Log LOG = LogFactory.getLog(Leases.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(Leases.class.getName()); public static final int MIN_WAIT_TIME = 100; private final Map leases = new ConcurrentHashMap<>(); @@ -63,7 +63,7 @@ public class Leases extends HasThread { /** * Creates a lease monitor - * + * * @param leaseCheckFrequency - how often the lease should be checked * (milliseconds) */ @@ -98,7 +98,7 @@ public class Leases 
extends HasThread { } catch (ConcurrentModificationException e) { continue; } catch (Throwable e) { - LOG.fatal("Unexpected exception killed leases thread", e); + LOG.error(HBaseMarkers.FATAL, "Unexpected exception killed leases thread", e); break; } @@ -291,11 +291,13 @@ public class Leases extends HasThread { return this.leaseName.hashCode(); } + @Override public long getDelay(TimeUnit unit) { return unit.convert(this.expirationTime - EnvironmentEdgeManager.currentTime(), TimeUnit.MILLISECONDS); } + @Override public int compareTo(Delayed o) { long delta = this.getDelay(TimeUnit.MILLISECONDS) - o.getDelay(TimeUnit.MILLISECONDS); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/LogRoller.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/LogRoller.java index 451b8869850..d4561ed895c 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/LogRoller.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/LogRoller.java @@ -25,8 +25,6 @@ import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.locks.ReentrantLock; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.Server; import org.apache.hadoop.hbase.regionserver.wal.AbstractFSWAL; @@ -37,7 +35,8 @@ import org.apache.hadoop.hbase.util.HasThread; import org.apache.hadoop.hbase.wal.WAL; import org.apache.hadoop.ipc.RemoteException; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** @@ -52,7 +51,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe @InterfaceAudience.Private @VisibleForTesting public class LogRoller extends HasThread implements Closeable { - private static final Log LOG = LogFactory.getLog(LogRoller.class); + private static final Logger LOG = LoggerFactory.getLogger(LogRoller.class); private final ReentrantLock rollLock = new ReentrantLock(); private final AtomicBoolean rollLog = new AtomicBoolean(false); private final ConcurrentHashMap walNeedsRoll = new ConcurrentHashMap<>(); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreCompactionStrategy.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreCompactionStrategy.java index b262328d72b..42302b2a5cc 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreCompactionStrategy.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreCompactionStrategy.java @@ -18,10 +18,10 @@ */ package org.apache.hadoop.hbase.regionserver; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * MemStoreCompactionStrategy is the root of a class hierarchy which defines the strategy for @@ -38,7 +38,7 @@ import org.apache.yetus.audience.InterfaceAudience; @InterfaceAudience.Private public abstract class MemStoreCompactionStrategy { - protected static final Log LOG = LogFactory.getLog(MemStoreCompactionStrategy.class); + protected static final Logger LOG = LoggerFactory.getLogger(MemStoreCompactionStrategy.class); // The upper bound for the number of 
segments we store in the pipeline prior to merging. public static final String COMPACTING_MEMSTORE_THRESHOLD_KEY = "hbase.hregion.compacting.pipeline.segments.limit"; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreCompactor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreCompactor.java index bb0dc36236c..9278f19268c 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreCompactor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreCompactor.java @@ -18,19 +18,20 @@ */ package org.apache.hadoop.hbase.regionserver; -import org.apache.hadoop.hbase.exceptions.IllegalArgumentIOException; -import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import java.io.IOException; +import java.util.concurrent.atomic.AtomicBoolean; + import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.MemoryCompactionPolicy; -import org.apache.yetus.audience.InterfaceAudience; +import org.apache.hadoop.hbase.exceptions.IllegalArgumentIOException; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.ClassSize; +import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; -import java.io.IOException; -import java.util.concurrent.atomic.AtomicBoolean; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** * The ongoing MemStore Compaction manager, dispatches a solo running compaction and interrupts @@ -55,7 +56,7 @@ public class MemStoreCompactor { + ClassSize.ATOMIC_BOOLEAN // isInterrupted (the internals) ); - private static final Log LOG = LogFactory.getLog(MemStoreCompactor.class); + private static final Logger LOG = LoggerFactory.getLogger(MemStoreCompactor.class); private CompactingMemStore compactingMemStore; // a static version of the segment list from the pipeline diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreCompactorSegmentsIterator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreCompactorSegmentsIterator.java index 0f96936d981..b081d773302 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreCompactorSegmentsIterator.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreCompactorSegmentsIterator.java @@ -25,14 +25,13 @@ import java.util.Iterator; import java.util.List; import java.util.NoSuchElementException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.coprocessor.CoprocessorException; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.io.Closeables; /** @@ -41,8 +40,8 @@ import org.apache.hadoop.hbase.shaded.com.google.common.io.Closeables; */ @InterfaceAudience.Private public class MemStoreCompactorSegmentsIterator extends MemStoreSegmentsIterator { - - private static final Log LOG = LogFactory.getLog(MemStoreCompactorSegmentsIterator.class); + private static final Logger LOG = + LoggerFactory.getLogger(MemStoreCompactorSegmentsIterator.class); 
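// Illustration, not part of the patch: the @Override annotations added in the Leases hunk above
// mark methods that implement java.util.concurrent.Delayed. A self-contained sketch of that
// pattern, using a made-up Expiring class rather than the real Lease:
import java.util.concurrent.Delayed;
import java.util.concurrent.TimeUnit;

class Expiring implements Delayed {
  private final long expirationTimeMillis;

  Expiring(long expirationTimeMillis) {
    this.expirationTimeMillis = expirationTimeMillis;
  }

  @Override
  public long getDelay(TimeUnit unit) {
    // Remaining time until expiration, converted to the caller's unit.
    return unit.convert(expirationTimeMillis - System.currentTimeMillis(), TimeUnit.MILLISECONDS);
  }

  @Override
  public int compareTo(Delayed o) {
    long delta = getDelay(TimeUnit.MILLISECONDS) - o.getDelay(TimeUnit.MILLISECONDS);
    return delta < 0 ? -1 : (delta > 0 ? 1 : 0);
  }
}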
private final List kvs = new ArrayList<>(); private boolean hasMore = true; @@ -84,6 +83,7 @@ public class MemStoreCompactorSegmentsIterator extends MemStoreSegmentsIterator return kvsIterator.next(); } + @Override public void close() { try { compactingScanner.close(); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreFlusher.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreFlusher.java index a314848cab7..9e352ef4283 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreFlusher.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreFlusher.java @@ -37,8 +37,6 @@ import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.LongAdder; import java.util.concurrent.locks.ReentrantReadWriteLock; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.DroppedSnapshotException; import org.apache.hadoop.hbase.HConstants; @@ -55,6 +53,8 @@ import org.apache.hadoop.ipc.RemoteException; import org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix; import org.apache.htrace.core.TraceScope; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Thread that flushes cache on request @@ -67,7 +67,7 @@ import org.apache.yetus.audience.InterfaceAudience; */ @InterfaceAudience.Private class MemStoreFlusher implements FlushRequester { - private static final Log LOG = LogFactory.getLog(MemStoreFlusher.class); + private static final Logger LOG = LoggerFactory.getLogger(MemStoreFlusher.class); private Configuration conf; // These two data members go together. 
Any entry in the one must have diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreLABImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreLABImpl.java index 38f79533a5f..ca079ea8c01 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreLABImpl.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreLABImpl.java @@ -28,14 +28,13 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; import java.util.concurrent.locks.ReentrantLock; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.ExtendedCell; import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; /** @@ -66,7 +65,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; @InterfaceAudience.Private public class MemStoreLABImpl implements MemStoreLAB { - static final Log LOG = LogFactory.getLog(MemStoreLABImpl.class); + static final Logger LOG = LoggerFactory.getLogger(MemStoreLABImpl.class); private AtomicReference curChunk = new AtomicReference<>(); // Lock to manage multiple handlers requesting for a chunk diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapperImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapperImpl.java index f65bb66a9ea..b643ecfb379 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapperImpl.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapperImpl.java @@ -26,8 +26,6 @@ import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; import org.apache.commons.lang3.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.CompatibilitySingletonFactory; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HDFSBlocksDistribution; @@ -46,6 +44,8 @@ import org.apache.hadoop.hbase.zookeeper.ZKWatcher; import org.apache.hadoop.hdfs.DFSHedgedReadMetrics; import org.apache.hadoop.metrics2.MetricsExecutor; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Impl for exposing HRegionServer Information through Hadoop's metrics 2 system. 
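// Aside, illustration only; the patch itself keeps the existing string concatenation. slf4j also
// supports parameterized messages, which skip formatting the message when the level is disabled.
// The class name and arguments below are made up for the sketch:
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

final class ParameterizedLoggingSketch {
  private static final Logger LOG = LoggerFactory.getLogger(ParameterizedLoggingSketch.class);

  static void flushRequested(String regionName, long memstoreSizeBytes) {
    // The {} placeholders are only substituted if INFO is actually enabled.
    LOG.info("Flush requested for {}, memstore size={}", regionName, memstoreSizeBytes);
  }

  private ParameterizedLoggingSketch() {
  }
}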
@@ -54,7 +54,7 @@ import org.apache.yetus.audience.InterfaceAudience; class MetricsRegionServerWrapperImpl implements MetricsRegionServerWrapper { - private static final Log LOG = LogFactory.getLog(MetricsRegionServerWrapperImpl.class); + private static final Logger LOG = LoggerFactory.getLogger(MetricsRegionServerWrapperImpl.class); private final HRegionServer regionServer; private final MetricsWALSource metricsWALSource; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionWrapperImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionWrapperImpl.java index 7f37bbf9b25..2aa1a82dc07 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionWrapperImpl.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionWrapperImpl.java @@ -27,19 +27,19 @@ import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.CompatibilitySingletonFactory; import org.apache.hadoop.hbase.client.RegionInfo; import org.apache.hadoop.hbase.client.TableDescriptor; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.metrics2.MetricsExecutor; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @InterfaceAudience.Private public class MetricsRegionWrapperImpl implements MetricsRegionWrapper, Closeable { - private static final Log LOG = LogFactory.getLog(MetricsRegionWrapperImpl.class); + private static final Logger LOG = LoggerFactory.getLogger(MetricsRegionWrapperImpl.class); public static final int PERIOD = 45; public static final String UNKNOWN = "unknown"; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MultiVersionConcurrencyControl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MultiVersionConcurrencyControl.java index 07ff281ce2f..a25ef3b6c26 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MultiVersionConcurrencyControl.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MultiVersionConcurrencyControl.java @@ -24,9 +24,9 @@ import org.apache.hadoop.hbase.shaded.com.google.common.base.MoreObjects; import java.util.LinkedList; import java.util.concurrent.atomic.AtomicLong; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.ClassSize; @@ -38,7 +38,7 @@ import org.apache.hadoop.hbase.util.ClassSize; */ @InterfaceAudience.Private public class MultiVersionConcurrencyControl { - private static final Log LOG = LogFactory.getLog(MultiVersionConcurrencyControl.class); + private static final Logger LOG = LoggerFactory.getLogger(MultiVersionConcurrencyControl.class); final AtomicLong readPoint = new AtomicLong(0); final AtomicLong writePoint = new AtomicLong(0); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java index 1bcb70f3db2..584d0a24186 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java +++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java @@ -44,8 +44,6 @@ import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.LongAdder; import org.apache.commons.lang3.mutable.MutableObject; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.ByteBufferCell; @@ -99,6 +97,7 @@ import org.apache.hadoop.hbase.ipc.RpcServerFactory; import org.apache.hadoop.hbase.ipc.RpcServerInterface; import org.apache.hadoop.hbase.ipc.ServerNotRunningYetException; import org.apache.hadoop.hbase.ipc.ServerRpcController; +import org.apache.hadoop.hbase.log.HBaseMarkers; import org.apache.hadoop.hbase.master.MasterRpcServices; import org.apache.hadoop.hbase.quotas.ActivePolicyEnforcement; import org.apache.hadoop.hbase.quotas.OperationQuota; @@ -129,6 +128,8 @@ import org.apache.hadoop.hbase.wal.WALEdit; import org.apache.hadoop.hbase.wal.WALKey; import org.apache.hadoop.hbase.wal.WALSplitter; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.common.cache.Cache; @@ -234,7 +235,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDe public class RSRpcServices implements HBaseRPCErrorHandler, AdminService.BlockingInterface, ClientService.BlockingInterface, PriorityFunction, ConfigurationObserver { - protected static final Log LOG = LogFactory.getLog(RSRpcServices.class); + protected static final Logger LOG = LoggerFactory.getLogger(RSRpcServices.class); /** RPC scheduler to use for the region server. */ public static final String REGION_SERVER_RPC_SCHEDULER_FACTORY_CLASS = @@ -793,7 +794,7 @@ public class RSRpcServices implements HBaseRPCErrorHandler, try { Get get = ProtobufUtil.toGet(action.getGet()); if (context != null) { - r = get(get, ((HRegion) region), closeCallBack, context); + r = get(get, (region), closeCallBack, context); } else { r = region.get(get); } @@ -1051,7 +1052,7 @@ public class RSRpcServices implements HBaseRPCErrorHandler, for (Cell metaCell : metaCells) { CompactionDescriptor compactionDesc = WALEdit.getCompaction(metaCell); boolean isDefaultReplica = RegionReplicaUtil.isDefaultReplica(region.getRegionInfo()); - HRegion hRegion = (HRegion)region; + HRegion hRegion = region; if (compactionDesc != null) { // replay the compaction. Remove the files from stores only if we are the primary // region replica (thus own the files) @@ -1485,8 +1486,9 @@ public class RSRpcServices implements HBaseRPCErrorHandler, || (e.getMessage() != null && e.getMessage().contains( "java.lang.OutOfMemoryError"))) { stop = true; - LOG.fatal("Run out of memory; " + RSRpcServices.class.getSimpleName() - + " will abort itself immediately", e); + LOG.error(HBaseMarkers.FATAL, "Run out of memory; " + + RSRpcServices.class.getSimpleName() + " will abort itself immediately", + e); } } finally { if (stop) { @@ -1551,7 +1553,7 @@ public class RSRpcServices implements HBaseRPCErrorHandler, try { checkOpen(); requestCount.increment(); - HRegion region = (HRegion) getRegion(request.getRegion()); + HRegion region = getRegion(request.getRegion()); // Quota support is enabled, the requesting user is not system/super user // and a quota policy is enforced that disables compactions. 
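// Illustration, not part of the patch, of why the (HRegion) casts above become unnecessary:
// once a field or accessor is declared with the concrete HRegion type instead of a wider
// interface, call sites can use HRegion-only methods directly. The types below are stand-ins,
// not HBase's real signatures:
interface RegionLike {
  String getName();
}

class ConcreteRegion implements RegionLike {
  @Override
  public String getName() {
    return "r1";
  }

  void forceSplit() {
    // a method that only exists on the concrete type
  }
}

class CastRemovalSketch {
  // Declared as the concrete type, so no (ConcreteRegion) cast is needed at call sites.
  private final ConcreteRegion region = new ConcreteRegion();

  ConcreteRegion getRegion() {
    return region;
  }

  void split() {
    getRegion().forceSplit();
  }
}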
if (QuotaUtil.isQuotaEnabled(getConfiguration()) && @@ -1598,7 +1600,7 @@ public class RSRpcServices implements HBaseRPCErrorHandler, try { checkOpen(); requestCount.increment(); - HRegion region = (HRegion) getRegion(request.getRegion()); + HRegion region = getRegion(request.getRegion()); LOG.info("Flushing " + region.getRegionInfo().getRegionNameAsString()); boolean shouldFlush = true; if (request.hasIfOlderThanTs()) { @@ -1663,7 +1665,7 @@ public class RSRpcServices implements HBaseRPCErrorHandler, RegionInfo info = region.getRegionInfo(); byte[] bestSplitRow = null; if (request.hasBestSplitRow() && request.getBestSplitRow()) { - HRegion r = (HRegion) region; + HRegion r = region; region.startRegionOperation(Operation.SPLIT_REGION); r.forceSplit(null); bestSplitRow = r.checkSplit(); @@ -2371,7 +2373,7 @@ public class RSRpcServices implements HBaseRPCErrorHandler, } if (existence == null) { if (context != null) { - r = get(clientGet, ((HRegion) region), null, context); + r = get(clientGet, (region), null, context); } else { // for test purpose r = region.get(clientGet); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java index 1717093ca91..fb87e51f749 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java @@ -32,8 +32,6 @@ import java.util.regex.Matcher; import org.apache.commons.collections4.map.AbstractReferenceMap; import org.apache.commons.collections4.map.ReferenceMap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -90,6 +88,8 @@ import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.hbase.wal.WALEdit; import org.apache.hadoop.hbase.wal.WALKey; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Implements the coprocessor environment and runtime support for coprocessors @@ -99,7 +99,7 @@ import org.apache.yetus.audience.InterfaceAudience; public class RegionCoprocessorHost extends CoprocessorHost { - private static final Log LOG = LogFactory.getLog(RegionCoprocessorHost.class); + private static final Logger LOG = LoggerFactory.getLogger(RegionCoprocessorHost.class); // The shared data map private static final ReferenceMap> SHARED_DATA_MAP = new ReferenceMap<>(AbstractReferenceMap.ReferenceStrength.HARD, @@ -141,6 +141,7 @@ public class RegionCoprocessorHost return region; } + @Override public OnlineRegions getOnlineRegions() { return this.services; } @@ -208,6 +209,7 @@ public class RegionCoprocessorHost * @return An instance of RegionServerServices, an object NOT for general user-space Coprocessor * consumption. 
*/ + @Override public RegionServerServices getRegionServerServices() { return this.rsServices; } @@ -551,7 +553,7 @@ public class RegionCoprocessorHost } }); } catch (IOException e) { - LOG.warn(e); + LOG.warn(e.toString(), e); } } @@ -586,7 +588,7 @@ public class RegionCoprocessorHost } }); } catch (IOException e) { - LOG.warn(e); + LOG.warn(e.toString(), e); } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerCoprocessorHost.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerCoprocessorHost.java index 9395b2e1c6b..dc1708cfbc3 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerCoprocessorHost.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerCoprocessorHost.java @@ -21,8 +21,7 @@ package org.apache.hadoop.hbase.regionserver; import java.io.IOException; import com.google.protobuf.Service; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; + import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.SharedConnection; @@ -41,12 +40,14 @@ import org.apache.hadoop.hbase.metrics.MetricRegistry; import org.apache.hadoop.hbase.replication.ReplicationEndpoint; import org.apache.hadoop.hbase.security.User; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @InterfaceAudience.Private public class RegionServerCoprocessorHost extends CoprocessorHost { - private static final Log LOG = LogFactory.getLog(RegionServerCoprocessorHost.class); + private static final Logger LOG = LoggerFactory.getLogger(RegionServerCoprocessorHost.class); private RegionServerServices rsServices; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SecureBulkLoadManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SecureBulkLoadManager.java index 89847f977ab..264d9858def 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SecureBulkLoadManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SecureBulkLoadManager.java @@ -26,8 +26,6 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -53,7 +51,8 @@ import org.apache.hadoop.io.Text; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.token.Token; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos; import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest; import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest; @@ -94,7 +93,7 @@ public class SecureBulkLoadManager { private static final int RANDOM_WIDTH = 320; private static final int RANDOM_RADIX = 32; - private static final Log LOG = LogFactory.getLog(SecureBulkLoadManager.class); + private static final Logger LOG = LoggerFactory.getLogger(SecureBulkLoadManager.class); private final static FsPermission PERM_ALL_ACCESS = FsPermission.valueOf("-rwxrwxrwx"); private final static FsPermission PERM_HIDDEN = FsPermission.valueOf("-rwx--x--x"); diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Segment.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Segment.java index c054666395f..121cbcae628 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Segment.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Segment.java @@ -21,11 +21,11 @@ package org.apache.hadoop.hbase.regionserver; import java.util.Collections; import java.util.Iterator; import java.util.List; +import java.util.Objects; import java.util.SortedSet; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicReference; -import org.apache.commons.logging.Log; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.PrivateCellUtil; @@ -35,7 +35,7 @@ import org.apache.hadoop.hbase.io.TimeRange; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.ClassSize; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** @@ -343,9 +343,9 @@ public abstract class Segment { /** * Dumps all cells of the segment into the given log */ - void dump(Log log) { + void dump(Logger log) { for (Cell cell: getCellSet()) { - log.debug(cell); + log.debug(Objects.toString(cell)); } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ServerNonceManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ServerNonceManager.java index fe6d01961e3..1b93df936b6 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ServerNonceManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ServerNonceManager.java @@ -23,13 +23,13 @@ import java.util.Date; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.ScheduledChore; import org.apache.hadoop.hbase.Stoppable; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.NonceKey; @@ -42,7 +42,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe @InterfaceAudience.Private public class ServerNonceManager { public static final String HASH_NONCE_GRACE_PERIOD_KEY = "hbase.server.hashNonce.gracePeriod"; - private static final Log LOG = LogFactory.getLog(ServerNonceManager.class); + private static final Logger LOG = LoggerFactory.getLogger(ServerNonceManager.class); /** The time to wait in an extremely unlikely case of a conflict with a running op. * Only here so that tests could override it and not wait. 
*/ diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ShutdownHook.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ShutdownHook.java index 58d6327a052..c4335f66d5d 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ShutdownHook.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ShutdownHook.java @@ -23,13 +23,14 @@ import java.lang.reflect.Field; import java.util.HashMap; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.Stoppable; +import org.apache.hadoop.hbase.log.HBaseMarkers; import org.apache.hadoop.hbase.util.ShutdownHookManager; import org.apache.hadoop.hbase.util.Threads; @@ -39,7 +40,7 @@ import org.apache.hadoop.hbase.util.Threads; */ @InterfaceAudience.Private public class ShutdownHook { - private static final Log LOG = LogFactory.getLog(ShutdownHook.class); + private static final Logger LOG = LoggerFactory.getLogger(ShutdownHook.class); private static final String CLIENT_FINALIZER_DATA_METHOD = "clientFinalizer"; /** @@ -211,10 +212,12 @@ public class ShutdownHook { } return hdfsClientFinalizer; } catch (NoSuchFieldException nsfe) { - LOG.fatal("Couldn't find field 'clientFinalizer' in FileSystem!", nsfe); + LOG.error(HBaseMarkers.FATAL, "Couldn't find field 'clientFinalizer' in FileSystem!", + nsfe); throw new RuntimeException("Failed to suppress HDFS shutdown hook"); } catch (IllegalAccessException iae) { - LOG.fatal("Couldn't access field 'clientFinalizer' in FileSystem!", iae); + LOG.error(HBaseMarkers.FATAL, "Couldn't access field 'clientFinalizer' in FileSystem!", + iae); throw new RuntimeException("Failed to suppress HDFS shutdown hook"); } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SplitLogWorker.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SplitLogWorker.java index 924b8fe4c98..067ad920d5e 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SplitLogWorker.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SplitLogWorker.java @@ -23,9 +23,9 @@ import java.io.InterruptedIOException; import java.net.ConnectException; import java.net.SocketTimeoutException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -61,7 +61,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe @InterfaceAudience.Private public class SplitLogWorker implements Runnable { - private static final Log LOG = LogFactory.getLog(SplitLogWorker.class); + private static final Logger LOG = LoggerFactory.getLogger(SplitLogWorker.class); Thread worker; // thread pool which executes recovery work diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SplitRequest.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SplitRequest.java index 4b1ae31233c..ce5c05119a8 100644 --- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SplitRequest.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SplitRequest.java @@ -20,8 +20,6 @@ package org.apache.hadoop.hbase.regionserver; import java.security.PrivilegedAction; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.RegionInfo; @@ -30,7 +28,8 @@ import org.apache.hadoop.hbase.regionserver.RegionServerServices.RegionStateTran import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.util.Bytes; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode; @@ -39,7 +38,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProto */ @InterfaceAudience.Private class SplitRequest implements Runnable { - private static final Log LOG = LogFactory.getLog(SplitRequest.class); + private static final Logger LOG = LoggerFactory.getLogger(SplitRequest.class); private final RegionInfo parent; private final byte[] midKey; private final HRegionServer server; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileInfo.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileInfo.java index 5b83acee4f1..a1fe2d1e224 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileInfo.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileInfo.java @@ -25,14 +25,14 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.regex.Matcher; import java.util.regex.Pattern; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HDFSBlocksDistribution; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper; import org.apache.hadoop.hbase.io.HFileLink; import org.apache.hadoop.hbase.io.HalfStoreFileReader; @@ -45,7 +45,7 @@ import org.apache.hadoop.hbase.util.FSUtils; */ @InterfaceAudience.Private public class StoreFileInfo { - private static final Log LOG = LogFactory.getLog(StoreFileInfo.class); + private static final Logger LOG = LoggerFactory.getLogger(StoreFileInfo.class); /** * A non-capture group, for hfiles, so that this can be embedded. 
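// Illustration, not part of the patch: commons-logging methods accepted any Object
// (LOG.warn(e), log.debug(cell)), while the slf4j Logger methods take a String message. That is
// why the hunks above become LOG.warn(e.toString(), e) and log.debug(Objects.toString(cell)),
// and why LOG.info(this) becomes LOG.info(toString()) in the CompactionConfiguration hunk later
// in the patch. A minimal sketch with a made-up class:
import java.io.IOException;
import java.util.Objects;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

final class ObjectToStringLoggingSketch {
  private static final Logger LOG = LoggerFactory.getLogger(ObjectToStringLoggingSketch.class);

  static void warnOnFailure(IOException e) {
    // String message plus throwable: the throwable argument is logged with its stack trace.
    LOG.warn(e.toString(), e);
  }

  static void debugValue(Object cell) {
    // Objects.toString handles null safely, matching the Segment.dump() change above.
    LOG.debug(Objects.toString(cell));
  }

  private ObjectToStringLoggingSketch() {
  }
}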
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileReader.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileReader.java index a9d9292c189..924e285700f 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileReader.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileReader.java @@ -28,8 +28,6 @@ import java.util.Optional; import java.util.SortedSet; import java.util.concurrent.atomic.AtomicInteger; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -53,7 +51,8 @@ import org.apache.hadoop.hbase.util.BloomFilterFactory; import org.apache.hadoop.hbase.util.Bytes; import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceStability; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** @@ -62,7 +61,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.PHOENIX) @InterfaceStability.Evolving public class StoreFileReader { - private static final Log LOG = LogFactory.getLog(StoreFileReader.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(StoreFileReader.class.getName()); protected BloomFilter generalBloomFilter = null; protected BloomFilter deleteFamilyBloomFilter = null; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileWriter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileWriter.java index 26977e40602..ecc812e14d0 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileWriter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileWriter.java @@ -30,8 +30,6 @@ import java.net.InetSocketAddress; import java.util.UUID; import java.util.regex.Pattern; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -52,7 +50,8 @@ import org.apache.hadoop.hbase.util.FSUtils; import org.apache.hadoop.hbase.util.RowBloomContext; import org.apache.hadoop.hbase.util.RowColBloomContext; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; /** @@ -61,7 +60,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; */ @InterfaceAudience.Private public class StoreFileWriter implements CellSink, ShipperListener { - private static final Log LOG = LogFactory.getLog(StoreFileWriter.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(StoreFileWriter.class.getName()); private static final Pattern dash = Pattern.compile("-"); private final BloomFilterWriter generalBloomFilterWriter; private final BloomFilterWriter deleteFamilyBloomFilterWriter; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java index 571e2c0e3c4..bdcf046c433 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java 
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java @@ -27,8 +27,6 @@ import java.util.OptionalInt; import java.util.concurrent.CountDownLatch; import java.util.concurrent.locks.ReentrantLock; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.CellUtil; @@ -51,7 +49,8 @@ import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; import org.apache.hadoop.hbase.util.CollectionUtils; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** @@ -65,7 +64,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe @InterfaceAudience.Private public class StoreScanner extends NonReversedNonLazyKeyValueScanner implements KeyValueScanner, InternalScanner, ChangedReadersObserver { - private static final Log LOG = LogFactory.getLog(StoreScanner.class); + private static final Logger LOG = LoggerFactory.getLogger(StoreScanner.class); // In unit tests, the store could be null protected final HStore store; private final CellComparator comparator; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreUtils.java index 2ada5a99f9c..a32a49302b5 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreUtils.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreUtils.java @@ -24,12 +24,12 @@ import java.util.Optional; import java.util.OptionalInt; import java.util.OptionalLong; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.CellUtil; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Utility functions for region server storage layer. @@ -37,7 +37,7 @@ import org.apache.yetus.audience.InterfaceAudience; @InterfaceAudience.Private public class StoreUtils { - private static final Log LOG = LogFactory.getLog(StoreUtils.class); + private static final Logger LOG = LoggerFactory.getLogger(StoreUtils.class); /** * Creates a deterministic hash code for store file collection. 
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StorefileRefresherChore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StorefileRefresherChore.java index 576aea1f8f8..18f7e185eed 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StorefileRefresherChore.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StorefileRefresherChore.java @@ -23,11 +23,11 @@ import java.util.HashMap; import java.util.Iterator; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.ScheduledChore; import org.apache.hadoop.hbase.Stoppable; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.master.cleaner.TimeToLiveHFileCleaner; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.util.StringUtils; @@ -44,7 +44,7 @@ import org.apache.hadoop.util.StringUtils; @InterfaceAudience.Private public class StorefileRefresherChore extends ScheduledChore { - private static final Log LOG = LogFactory.getLog(StorefileRefresherChore.class); + private static final Logger LOG = LoggerFactory.getLogger(StorefileRefresherChore.class); /** * The period (in milliseconds) for refreshing the store files for the secondary regions. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeMultiFileWriter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeMultiFileWriter.java index 732fc061e84..fc0598d89ac 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeMultiFileWriter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeMultiFileWriter.java @@ -24,13 +24,13 @@ import java.util.Collection; import java.util.Collections; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.util.Bytes; /** @@ -40,7 +40,7 @@ import org.apache.hadoop.hbase.util.Bytes; @InterfaceAudience.Private public abstract class StripeMultiFileWriter extends AbstractMultiFileWriter { - private static final Log LOG = LogFactory.getLog(StripeMultiFileWriter.class); + private static final Logger LOG = LoggerFactory.getLogger(StripeMultiFileWriter.class); protected final CellComparator comparator; protected List existingWriters; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreConfig.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreConfig.java index eb2a9b6d96e..61deb0b93ce 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreConfig.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreConfig.java @@ -18,9 +18,9 @@ */ package org.apache.hadoop.hbase.regionserver; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import 
org.apache.hadoop.hbase.regionserver.compactions.CompactionConfiguration; @@ -31,7 +31,7 @@ import org.apache.hadoop.hbase.regionserver.compactions.CompactionConfiguration; */ @InterfaceAudience.Private public class StripeStoreConfig { - private static final Log LOG = LogFactory.getLog(StripeStoreConfig.class); + private static final Logger LOG = LoggerFactory.getLogger(StripeStoreConfig.class); /** The maximum number of files to compact within a stripe; same as for regular compaction. */ public static final String MAX_FILES_KEY = "hbase.store.stripe.compaction.maxFiles"; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreEngine.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreEngine.java index 8c2636355cd..03874e1d4c4 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreEngine.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreEngine.java @@ -21,14 +21,14 @@ import java.io.IOException; import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequestImpl; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.regionserver.compactions.CompactionContext; import org.apache.hadoop.hbase.regionserver.compactions.StripeCompactionPolicy; import org.apache.hadoop.hbase.regionserver.compactions.StripeCompactor; @@ -43,7 +43,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG) public class StripeStoreEngine extends StoreEngine { - private static final Log LOG = LogFactory.getLog(StripeStoreEngine.class); + private static final Logger LOG = LoggerFactory.getLogger(StripeStoreEngine.class); private StripeStoreConfig config; @Override diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.java index 737e1a6df59..6a5e84c74a8 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.java @@ -31,8 +31,6 @@ import java.util.Map; import java.util.Optional; import java.util.TreeMap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; @@ -44,7 +42,8 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.ConcatenatedLists; import org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableCollection; import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableList; @@ -67,7 +66,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableList; @InterfaceAudience.Private public class 
StripeStoreFileManager implements StoreFileManager, StripeCompactionPolicy.StripeInformationProvider { - private static final Log LOG = LogFactory.getLog(StripeStoreFileManager.class); + private static final Logger LOG = LoggerFactory.getLogger(StripeStoreFileManager.class); /** * The file metadata fields that contain the stripe information. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFlusher.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFlusher.java index a227979e579..d2333451e3e 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFlusher.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFlusher.java @@ -24,8 +24,6 @@ import java.io.IOException; import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.CellComparator; @@ -33,7 +31,8 @@ import org.apache.hadoop.hbase.monitoring.MonitoredTask; import org.apache.hadoop.hbase.regionserver.compactions.StripeCompactionPolicy; import org.apache.hadoop.hbase.regionserver.throttle.ThroughputController; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** @@ -42,7 +41,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe */ @InterfaceAudience.Private public class StripeStoreFlusher extends StoreFlusher { - private static final Log LOG = LogFactory.getLog(StripeStoreFlusher.class); + private static final Logger LOG = LoggerFactory.getLogger(StripeStoreFlusher.class); private final Object flushLock = new Object(); private final StripeCompactionPolicy policy; private final StripeCompactionPolicy.StripeInformationProvider stripes; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/AbstractMultiOutputCompactor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/AbstractMultiOutputCompactor.java index c5ef127866e..a8ffc2e4ea7 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/AbstractMultiOutputCompactor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/AbstractMultiOutputCompactor.java @@ -19,8 +19,6 @@ package org.apache.hadoop.hbase.regionserver.compactions; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -31,6 +29,8 @@ import org.apache.hadoop.hbase.regionserver.InternalScanner; import org.apache.hadoop.hbase.regionserver.StoreFileWriter; import org.apache.hadoop.hbase.regionserver.StoreScanner; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Base class for implementing a Compactor which will generate multiple output files after @@ -40,7 +40,7 @@ import org.apache.yetus.audience.InterfaceAudience; public abstract class AbstractMultiOutputCompactor extends Compactor { - private static final Log LOG = LogFactory.getLog(AbstractMultiOutputCompactor.class); + private static final Logger LOG = 
LoggerFactory.getLogger(AbstractMultiOutputCompactor.class); public AbstractMultiOutputCompactor(Configuration conf, HStore store) { super(conf, store); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/CompactionConfiguration.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/CompactionConfiguration.java index b8194eb55bd..d2a86c1c72c 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/CompactionConfiguration.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/CompactionConfiguration.java @@ -19,11 +19,11 @@ package org.apache.hadoop.hbase.regionserver.compactions; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HConstants; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.regionserver.StoreConfigInformation; /** @@ -46,7 +46,7 @@ import org.apache.hadoop.hbase.regionserver.StoreConfigInformation; @InterfaceAudience.Private public class CompactionConfiguration { - private static final Log LOG = LogFactory.getLog(CompactionConfiguration.class); + private static final Logger LOG = LoggerFactory.getLogger(CompactionConfiguration.class); public static final String HBASE_HSTORE_COMPACTION_RATIO_KEY = "hbase.hstore.compaction.ratio"; public static final String HBASE_HSTORE_COMPACTION_RATIO_OFFPEAK_KEY = @@ -142,7 +142,7 @@ public class CompactionConfiguration { this.dateTieredCompactionWindowFactory = conf.get( DATE_TIERED_COMPACTION_WINDOW_FACTORY_CLASS_KEY, DEFAULT_DATE_TIERED_COMPACTION_WINDOW_FACTORY_CLASS.getName()); - LOG.info(this); + LOG.info(toString()); } @Override diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/Compactor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/Compactor.java index 014d4d1daf8..9703c3b5285 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/Compactor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/Compactor.java @@ -29,8 +29,6 @@ import java.util.Collection; import java.util.List; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; @@ -62,7 +60,8 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.io.Closeables; /** @@ -71,7 +70,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.io.Closeables; */ @InterfaceAudience.Private public abstract class Compactor { - private static final Log LOG = LogFactory.getLog(Compactor.class); + private static final Logger LOG = LoggerFactory.getLogger(Compactor.class); protected static final long COMPACTION_PROGRESS_LOG_INTERVAL = 60 * 1000; protected volatile CompactionProgress progress; protected final Configuration conf; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/DateTieredCompactionPolicy.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/DateTieredCompactionPolicy.java index a0c3e309c98..a6f1b9eb4b8 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/DateTieredCompactionPolicy.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/DateTieredCompactionPolicy.java @@ -25,8 +25,6 @@ import java.util.Collections; import java.util.List; import java.util.OptionalLong; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.HDFSBlocksDistribution; @@ -38,7 +36,8 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.hbase.util.ReflectionUtils; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterators; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; @@ -65,7 +64,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.math.LongMath; @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG) public class DateTieredCompactionPolicy extends SortedCompactionPolicy { - private static final Log LOG = LogFactory.getLog(DateTieredCompactionPolicy.class); + private static final Logger LOG = LoggerFactory.getLogger(DateTieredCompactionPolicy.class); private final RatioBasedCompactionPolicy compactionPolicyPerWindow; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/DateTieredCompactor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/DateTieredCompactor.java index 09dda90d905..21eaa941cd5 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/DateTieredCompactor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/DateTieredCompactor.java @@ -21,8 +21,6 @@ import java.io.IOException; import java.util.List; import java.util.OptionalLong; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.regionserver.DateTieredMultiFileWriter; @@ -32,6 +30,8 @@ import org.apache.hadoop.hbase.regionserver.StoreUtils; import org.apache.hadoop.hbase.regionserver.throttle.ThroughputController; import org.apache.hadoop.hbase.security.User; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This compactor will generate StoreFile for different time ranges. 
@@ -39,7 +39,7 @@ import org.apache.yetus.audience.InterfaceAudience; @InterfaceAudience.Private public class DateTieredCompactor extends AbstractMultiOutputCompactor { - private static final Log LOG = LogFactory.getLog(DateTieredCompactor.class); + private static final Logger LOG = LoggerFactory.getLogger(DateTieredCompactor.class); public DateTieredCompactor(Configuration conf, HStore store) { super(conf, store); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/DefaultCompactor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/DefaultCompactor.java index 14539b0e2ed..41b819b5d5b 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/DefaultCompactor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/DefaultCompactor.java @@ -21,8 +21,6 @@ import java.io.IOException; import java.util.Collection; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.regionserver.HStore; @@ -33,7 +31,8 @@ import org.apache.hadoop.hbase.regionserver.throttle.NoLimitThroughputController import org.apache.hadoop.hbase.regionserver.throttle.ThroughputController; import org.apache.hadoop.hbase.security.User; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; /** @@ -42,7 +41,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; */ @InterfaceAudience.Private public class DefaultCompactor extends Compactor { - private static final Log LOG = LogFactory.getLog(DefaultCompactor.class); + private static final Logger LOG = LoggerFactory.getLogger(DefaultCompactor.class); public DefaultCompactor(Configuration conf, HStore store) { super(conf, store); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/ExploringCompactionPolicy.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/ExploringCompactionPolicy.java index b0942f60893..d9d10d98ae9 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/ExploringCompactionPolicy.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/ExploringCompactionPolicy.java @@ -23,12 +23,12 @@ import java.io.IOException; import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.regionserver.HStoreFile; import org.apache.hadoop.hbase.regionserver.StoreConfigInformation; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Class to pick which files if any to compact together. @@ -38,7 +38,7 @@ import org.apache.yetus.audience.InterfaceAudience; */ @InterfaceAudience.Private public class ExploringCompactionPolicy extends RatioBasedCompactionPolicy { - private static final Log LOG = LogFactory.getLog(ExploringCompactionPolicy.class); + private static final Logger LOG = LoggerFactory.getLogger(ExploringCompactionPolicy.class); /** * Constructor for ExploringCompactionPolicy. 
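For readers skimming the hunks above: the change in CompactionConfiguration (and again in ExponentialCompactionWindowFactory below) is the same mechanical swap repeated across the module, plus one small semantic fix. A minimal sketch of the pattern, using an illustrative class name rather than one of the patched files:

// Minimal sketch of the logger migration applied throughout these hunks
// (ExampleConfiguration is illustrative, not one of the patched files).
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class ExampleConfiguration {
  // Before: private static final Log LOG = LogFactory.getLog(ExampleConfiguration.class);
  private static final Logger LOG = LoggerFactory.getLogger(ExampleConfiguration.class);

  ExampleConfiguration() {
    // commons-logging accepted any Object (LOG.info(this)); SLF4J's info() takes a
    // String message, which is why the patch calls toString() explicitly.
    LOG.info(toString());
  }

  @Override
  public String toString() {
    return "ExampleConfiguration[...]";
  }
}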
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/ExponentialCompactionWindowFactory.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/ExponentialCompactionWindowFactory.java index 67c7a24aab1..6d2245cd7b9 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/ExponentialCompactionWindowFactory.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/ExponentialCompactionWindowFactory.java @@ -17,21 +17,21 @@ */ package org.apache.hadoop.hbase.regionserver.compactions; -import org.apache.hadoop.hbase.shaded.com.google.common.math.LongMath; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.apache.hadoop.hbase.shaded.com.google.common.math.LongMath; /** * Exponential compaction window implementation. */ @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG) public class ExponentialCompactionWindowFactory extends CompactionWindowFactory { - - private static final Log LOG = LogFactory.getLog(ExponentialCompactionWindowFactory.class); + private static final Logger LOG = + LoggerFactory.getLogger(ExponentialCompactionWindowFactory.class); public static final String BASE_WINDOW_MILLIS_KEY = "hbase.hstore.compaction.date.tiered.base.window.millis"; @@ -128,7 +128,7 @@ public class ExponentialCompactionWindowFactory extends CompactionWindowFactory windowsPerTier = conf.getInt(WINDOWS_PER_TIER_KEY, 4); maxTierAgeMillis = conf.getLong(MAX_TIER_AGE_MILLIS_KEY, comConf.getDateTieredMaxStoreFileAgeMillis()); - LOG.info(this); + LOG.info(toString()); } @Override diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/FIFOCompactionPolicy.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/FIFOCompactionPolicy.java index 032a9c614d9..32b40e1b059 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/FIFOCompactionPolicy.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/FIFOCompactionPolicy.java @@ -23,14 +23,14 @@ import java.util.ArrayList; import java.util.Collection; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.regionserver.HStoreFile; import org.apache.hadoop.hbase.regionserver.StoreConfigInformation; import org.apache.hadoop.hbase.regionserver.StoreUtils; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * @@ -47,7 +47,7 @@ import org.apache.yetus.audience.InterfaceAudience; @InterfaceAudience.Private public class FIFOCompactionPolicy extends ExploringCompactionPolicy { - private static final Log LOG = LogFactory.getLog(FIFOCompactionPolicy.class); + private static final Logger LOG = LoggerFactory.getLogger(FIFOCompactionPolicy.class); public FIFOCompactionPolicy(Configuration conf, StoreConfigInformation storeConfigInfo) { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/OffPeakHours.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/OffPeakHours.java index 3cb88439527..b920de2b57d 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/OffPeakHours.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/OffPeakHours.java @@ -17,14 +17,14 @@ */ package org.apache.hadoop.hbase.regionserver.compactions; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; @InterfaceAudience.Private public abstract class OffPeakHours { - private static final Log LOG = LogFactory.getLog(OffPeakHours.class); + private static final Logger LOG = LoggerFactory.getLogger(OffPeakHours.class); public static final OffPeakHours DISABLED = new OffPeakHours() { @Override public boolean isOffPeakHour() { return false; } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/RatioBasedCompactionPolicy.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/RatioBasedCompactionPolicy.java index dba0473419e..a6ea9b22f45 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/RatioBasedCompactionPolicy.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/RatioBasedCompactionPolicy.java @@ -24,8 +24,6 @@ import java.util.Collection; import java.util.List; import java.util.OptionalLong; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.regionserver.HStore; @@ -35,6 +33,8 @@ import org.apache.hadoop.hbase.regionserver.StoreConfigInformation; import org.apache.hadoop.hbase.regionserver.StoreUtils; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * The default algorithm for selecting files for compaction. 
@@ -43,7 +43,7 @@ import org.apache.yetus.audience.InterfaceAudience; */ @InterfaceAudience.Private public class RatioBasedCompactionPolicy extends SortedCompactionPolicy { - private static final Log LOG = LogFactory.getLog(RatioBasedCompactionPolicy.class); + private static final Logger LOG = LoggerFactory.getLogger(RatioBasedCompactionPolicy.class); public RatioBasedCompactionPolicy(Configuration conf, StoreConfigInformation storeConfigInfo) { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/SortedCompactionPolicy.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/SortedCompactionPolicy.java index f284489eaaa..d9b3dd45fd3 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/SortedCompactionPolicy.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/SortedCompactionPolicy.java @@ -17,14 +17,13 @@ import java.util.List; import java.util.OptionalInt; import java.util.Random; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.regionserver.HStoreFile; import org.apache.hadoop.hbase.regionserver.StoreConfigInformation; import org.apache.hadoop.hbase.regionserver.StoreUtils; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; import org.apache.hadoop.hbase.shaded.com.google.common.base.Predicate; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Collections2; @@ -36,7 +35,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; @InterfaceAudience.Private public abstract class SortedCompactionPolicy extends CompactionPolicy { - private static final Log LOG = LogFactory.getLog(SortedCompactionPolicy.class); + private static final Logger LOG = LoggerFactory.getLogger(SortedCompactionPolicy.class); public SortedCompactionPolicy(Configuration conf, StoreConfigInformation storeConfigInfo) { super(conf, storeConfigInfo); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/StripeCompactionPolicy.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/StripeCompactionPolicy.java index b6de6783bcd..053920dd1c5 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/StripeCompactionPolicy.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/StripeCompactionPolicy.java @@ -25,8 +25,6 @@ import java.util.ArrayList; import java.util.Collection; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.CellComparator; @@ -42,7 +40,8 @@ import org.apache.hadoop.hbase.util.ConcatenatedLists; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.Pair; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableList; /** @@ -50,7 +49,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableList; */ @InterfaceAudience.Private public class StripeCompactionPolicy extends CompactionPolicy { - private final static Log LOG = 
LogFactory.getLog(StripeCompactionPolicy.class); + private final static Logger LOG = LoggerFactory.getLogger(StripeCompactionPolicy.class); // Policy used to compact individual stripes. private ExploringCompactionPolicy stripePolicy = null; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/StripeCompactor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/StripeCompactor.java index c9e591ea432..41e0a71b49d 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/StripeCompactor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/StripeCompactor.java @@ -20,8 +20,6 @@ package org.apache.hadoop.hbase.regionserver.compactions; import java.io.IOException; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.regionserver.HStore; @@ -34,6 +32,8 @@ import org.apache.hadoop.hbase.regionserver.throttle.ThroughputController; import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.util.Bytes; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This is the placeholder for stripe compactor. The implementation, as well as the proper javadoc, @@ -41,7 +41,7 @@ import org.apache.yetus.audience.InterfaceAudience; */ @InterfaceAudience.Private public class StripeCompactor extends AbstractMultiOutputCompactor { - private static final Log LOG = LogFactory.getLog(StripeCompactor.class); + private static final Logger LOG = LoggerFactory.getLogger(StripeCompactor.class); public StripeCompactor(Configuration conf, HStore store) { super(conf, store); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/handler/CloseRegionHandler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/handler/CloseRegionHandler.java index f48ee9260e2..7583b726afa 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/handler/CloseRegionHandler.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/handler/CloseRegionHandler.java @@ -20,8 +20,6 @@ package org.apache.hadoop.hbase.regionserver.handler; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.Server; import org.apache.hadoop.hbase.ServerName; @@ -32,7 +30,8 @@ import org.apache.hadoop.hbase.regionserver.HRegion; import org.apache.hadoop.hbase.regionserver.RegionServerServices; import org.apache.hadoop.hbase.regionserver.RegionServerServices.RegionStateTransitionContext; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode; /** @@ -45,7 +44,7 @@ public class CloseRegionHandler extends EventHandler { // after the user regions have closed. What // about the case where master tells us to shutdown a catalog region and we // have a running queue of user regions to close? 
- private static final Log LOG = LogFactory.getLog(CloseRegionHandler.class); + private static final Logger LOG = LoggerFactory.getLogger(CloseRegionHandler.class); private final RegionServerServices rsServices; private final RegionInfo regionInfo; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/handler/OpenRegionHandler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/handler/OpenRegionHandler.java index e4b3ed2d094..f408629534a 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/handler/OpenRegionHandler.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/handler/OpenRegionHandler.java @@ -21,8 +21,6 @@ package org.apache.hadoop.hbase.regionserver.handler; import java.io.IOException; import java.util.concurrent.atomic.AtomicBoolean; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.Server; import org.apache.hadoop.hbase.client.RegionInfo; @@ -37,7 +35,8 @@ import org.apache.hadoop.hbase.regionserver.RegionServerServices.PostOpenDeployC import org.apache.hadoop.hbase.regionserver.RegionServerServices.RegionStateTransitionContext; import org.apache.hadoop.hbase.util.CancelableProgressable; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode; /** * Handles opening of a region on a region server. @@ -46,7 +45,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProto */ @InterfaceAudience.Private public class OpenRegionHandler extends EventHandler { - private static final Log LOG = LogFactory.getLog(OpenRegionHandler.class); + private static final Logger LOG = LoggerFactory.getLogger(OpenRegionHandler.class); protected final RegionServerServices rsServices; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/handler/ParallelSeekHandler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/handler/ParallelSeekHandler.java index 9f66be9ba63..ed1b2c760f9 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/handler/ParallelSeekHandler.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/handler/ParallelSeekHandler.java @@ -21,9 +21,9 @@ package org.apache.hadoop.hbase.regionserver.handler; import java.io.IOException; import java.util.concurrent.CountDownLatch; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.executor.EventHandler; import org.apache.hadoop.hbase.executor.EventType; @@ -34,7 +34,7 @@ import org.apache.hadoop.hbase.regionserver.KeyValueScanner; */ @InterfaceAudience.Private public class ParallelSeekHandler extends EventHandler { - private static final Log LOG = LogFactory.getLog(ParallelSeekHandler.class); + private static final Logger LOG = LoggerFactory.getLogger(ParallelSeekHandler.class); private KeyValueScanner scanner; private Cell keyValue; private long readPoint; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/handler/RegionReplicaFlushHandler.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/handler/RegionReplicaFlushHandler.java index 0d13aafe086..b917379930b 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/handler/RegionReplicaFlushHandler.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/handler/RegionReplicaFlushHandler.java @@ -21,13 +21,13 @@ package org.apache.hadoop.hbase.regionserver.handler; import java.io.IOException; import java.io.InterruptedIOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.Server; import org.apache.hadoop.hbase.TableNotFoundException; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.client.ClusterConnection; import org.apache.hadoop.hbase.client.FlushRegionCallable; import org.apache.hadoop.hbase.client.RegionReplicaUtil; @@ -54,7 +54,7 @@ import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil; @InterfaceAudience.Private public class RegionReplicaFlushHandler extends EventHandler { - private static final Log LOG = LogFactory.getLog(RegionReplicaFlushHandler.class); + private static final Logger LOG = LoggerFactory.getLogger(RegionReplicaFlushHandler.class); private final ClusterConnection connection; private final RpcRetryingCallerFactory rpcRetryingCallerFactory; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/handler/WALSplitterHandler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/handler/WALSplitterHandler.java index 07e7de08196..49ab574ec52 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/handler/WALSplitterHandler.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/handler/WALSplitterHandler.java @@ -21,9 +21,9 @@ package org.apache.hadoop.hbase.regionserver.handler; import java.io.IOException; import java.util.concurrent.atomic.AtomicInteger; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.Server; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.SplitLogCounters; @@ -40,7 +40,7 @@ import org.apache.hadoop.hbase.util.CancelableProgressable; */ @InterfaceAudience.Private public class WALSplitterHandler extends EventHandler { - private static final Log LOG = LogFactory.getLog(WALSplitterHandler.class); + private static final Logger LOG = LoggerFactory.getLogger(WALSplitterHandler.class); private final ServerName serverName; private final CancelableProgressable reporter; private final AtomicInteger inProgressTasks; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/FlushSnapshotSubprocedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/FlushSnapshotSubprocedure.java index 13ab8c86e49..a20a001e27c 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/FlushSnapshotSubprocedure.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/FlushSnapshotSubprocedure.java @@ -21,10 +21,10 @@ import java.io.IOException; import java.util.List; import java.util.concurrent.Callable; -import org.apache.commons.logging.Log; -import 
org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceStability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.client.IsolationLevel; import org.apache.hadoop.hbase.errorhandling.ForeignException; import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher; @@ -47,7 +47,7 @@ import org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils; @InterfaceAudience.Private @InterfaceStability.Unstable public class FlushSnapshotSubprocedure extends Subprocedure { - private static final Log LOG = LogFactory.getLog(FlushSnapshotSubprocedure.class); + private static final Logger LOG = LoggerFactory.getLogger(FlushSnapshotSubprocedure.class); private final List regions; private final SnapshotDescription snapshot; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/RegionServerSnapshotManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/RegionServerSnapshotManager.java index 6a7d83ebe42..08335ab4f11 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/RegionServerSnapshotManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/RegionServerSnapshotManager.java @@ -30,8 +30,6 @@ import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Abortable; import org.apache.hadoop.hbase.DaemonThreadFactory; @@ -58,6 +56,8 @@ import org.apache.hadoop.hbase.regionserver.RegionServerServices; import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription; import org.apache.hadoop.hbase.snapshot.SnapshotCreationException; import org.apache.zookeeper.KeeperException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This manager class handles the work dealing with snapshots for a {@link HRegionServer}. 
@@ -75,7 +75,7 @@ import org.apache.zookeeper.KeeperException; @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG) @InterfaceStability.Unstable public class RegionServerSnapshotManager extends RegionServerProcedureManager { - private static final Log LOG = LogFactory.getLog(RegionServerSnapshotManager.class); + private static final Logger LOG = LoggerFactory.getLogger(RegionServerSnapshotManager.class); /** Maximum number of snapshot region tasks that can run concurrently */ private static final String CONCURENT_SNAPSHOT_TASKS_KEY = "hbase.snapshot.region.concurrentTasks"; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/throttle/CompactionThroughputControllerFactory.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/throttle/CompactionThroughputControllerFactory.java index 55def0792bb..45e7267ed26 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/throttle/CompactionThroughputControllerFactory.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/throttle/CompactionThroughputControllerFactory.java @@ -17,18 +17,18 @@ */ package org.apache.hadoop.hbase.regionserver.throttle; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.regionserver.RegionServerServices; import org.apache.hadoop.util.ReflectionUtils; @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG) public final class CompactionThroughputControllerFactory { - - private static final Log LOG = LogFactory.getLog(CompactionThroughputControllerFactory.class); + private static final Logger LOG = + LoggerFactory.getLogger(CompactionThroughputControllerFactory.class); public static final String HBASE_THROUGHPUT_CONTROLLER_KEY = "hbase.regionserver.throughput.controller"; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/throttle/FlushThroughputControllerFactory.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/throttle/FlushThroughputControllerFactory.java index 6311952bdee..fc75c583583 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/throttle/FlushThroughputControllerFactory.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/throttle/FlushThroughputControllerFactory.java @@ -17,18 +17,18 @@ */ package org.apache.hadoop.hbase.regionserver.throttle; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.regionserver.RegionServerServices; import org.apache.hadoop.util.ReflectionUtils; @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG) public final class FlushThroughputControllerFactory { - private static final Log LOG = LogFactory.getLog(FlushThroughputControllerFactory.class); + private static final Logger LOG = LoggerFactory.getLogger(FlushThroughputControllerFactory.class); public static final String HBASE_FLUSH_THROUGHPUT_CONTROLLER_KEY = "hbase.regionserver.flush.throughput.controller"; diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/throttle/PressureAwareCompactionThroughputController.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/throttle/PressureAwareCompactionThroughputController.java index c56b47409f7..b3c7bf37a24 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/throttle/PressureAwareCompactionThroughputController.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/throttle/PressureAwareCompactionThroughputController.java @@ -17,12 +17,12 @@ */ package org.apache.hadoop.hbase.regionserver.throttle; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.ScheduledChore; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.regionserver.RegionServerServices; import org.apache.hadoop.hbase.regionserver.compactions.OffPeakHours; @@ -42,8 +42,8 @@ import org.apache.hadoop.hbase.regionserver.compactions.OffPeakHours; @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG) public class PressureAwareCompactionThroughputController extends PressureAwareThroughputController { - private final static Log LOG = LogFactory - .getLog(PressureAwareCompactionThroughputController.class); + private final static Logger LOG = LoggerFactory + .getLogger(PressureAwareCompactionThroughputController.class); public static final String HBASE_HSTORE_COMPACTION_MAX_THROUGHPUT_HIGHER_BOUND = "hbase.hstore.compaction.throughput.higher.bound"; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/throttle/PressureAwareFlushThroughputController.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/throttle/PressureAwareFlushThroughputController.java index bdfa99d11fa..fac4e86e5d4 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/throttle/PressureAwareFlushThroughputController.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/throttle/PressureAwareFlushThroughputController.java @@ -17,12 +17,12 @@ */ package org.apache.hadoop.hbase.regionserver.throttle; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.ScheduledChore; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.regionserver.RegionServerServices; import org.apache.hadoop.hbase.regionserver.compactions.OffPeakHours; @@ -40,7 +40,8 @@ import org.apache.hadoop.hbase.regionserver.compactions.OffPeakHours; @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG) public class PressureAwareFlushThroughputController extends PressureAwareThroughputController { - private static final Log LOG = LogFactory.getLog(PressureAwareFlushThroughputController.class); + private static final Logger LOG = + LoggerFactory.getLogger(PressureAwareFlushThroughputController.class); public static final String HBASE_HSTORE_FLUSH_MAX_THROUGHPUT_UPPER_BOUND = "hbase.hstore.flush.throughput.upper.bound"; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/throttle/PressureAwareThroughputController.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/throttle/PressureAwareThroughputController.java index 78413361b41..ec90830c177 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/throttle/PressureAwareThroughputController.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/throttle/PressureAwareThroughputController.java @@ -20,12 +20,12 @@ package org.apache.hadoop.hbase.regionserver.throttle; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.Stoppable; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.regionserver.RegionServerServices; import org.apache.hadoop.hbase.regionserver.compactions.OffPeakHours; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; @@ -33,7 +33,8 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG) public abstract class PressureAwareThroughputController extends Configured implements ThroughputController, Stoppable { - private static final Log LOG = LogFactory.getLog(PressureAwareThroughputController.class); + private static final Logger LOG = + LoggerFactory.getLogger(PressureAwareThroughputController.class); /** * Stores the information of one controlled compaction. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AbstractFSWAL.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AbstractFSWAL.java index 992903656be..baa75903615 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AbstractFSWAL.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AbstractFSWAL.java @@ -48,8 +48,6 @@ import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.locks.ReentrantLock; import org.apache.commons.lang3.mutable.MutableLong; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -62,6 +60,7 @@ import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.client.RegionInfo; import org.apache.hadoop.hbase.exceptions.TimeoutIOException; import org.apache.hadoop.hbase.io.util.MemorySizeUtil; +import org.apache.hadoop.hbase.log.HBaseMarkers; import org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl; import org.apache.hadoop.hbase.trace.TraceUtil; import org.apache.hadoop.hbase.util.Bytes; @@ -83,7 +82,8 @@ import org.apache.hadoop.hdfs.protocol.DatanodeInfo; import org.apache.hadoop.util.StringUtils; import org.apache.htrace.core.TraceScope; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** @@ -115,7 +115,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe @InterfaceAudience.Private public abstract class AbstractFSWAL implements WAL { - private static final Log LOG = LogFactory.getLog(AbstractFSWAL.class); + private static final Logger LOG = LoggerFactory.getLogger(AbstractFSWAL.class); protected 
static final int DEFAULT_SLOW_SYNC_TIME_MS = 100; // in ms @@ -1117,8 +1117,8 @@ public abstract class AbstractFSWAL implements WAL { if (args[0].compareTo("--dump") == 0) { WALPrettyPrinter.run(Arrays.copyOfRange(args, 1, args.length)); } else if (args[0].compareTo("--perf") == 0) { - LOG.fatal("Please use the WALPerformanceEvaluation tool instead. i.e.:"); - LOG.fatal( + LOG.error(HBaseMarkers.FATAL, "Please use the WALPerformanceEvaluation tool instead. i.e.:"); + LOG.error(HBaseMarkers.FATAL, "\thbase org.apache.hadoop.hbase.wal.WALPerformanceEvaluation --iterations " + args[1]); System.exit(-1); } else if (args[0].compareTo("--split") == 0) { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AbstractProtobufLogWriter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AbstractProtobufLogWriter.java index 256ced64bdf..befc5509fd5 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AbstractProtobufLogWriter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AbstractProtobufLogWriter.java @@ -28,14 +28,14 @@ import java.util.concurrent.atomic.AtomicLong; import javax.crypto.spec.SecretKeySpec; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.HConstants; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.codec.Codec; import org.apache.hadoop.hbase.io.crypto.Cipher; import org.apache.hadoop.hbase.io.crypto.Encryption; @@ -56,7 +56,7 @@ import org.apache.hadoop.hbase.util.FSUtils; @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG) public abstract class AbstractProtobufLogWriter { - private static final Log LOG = LogFactory.getLog(AbstractProtobufLogWriter.class); + private static final Logger LOG = LoggerFactory.getLogger(AbstractProtobufLogWriter.class); protected CompressionContext compressionContext; protected Configuration conf; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AsyncFSWAL.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AsyncFSWAL.java index a9c440dca07..bdf6333aa6e 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AsyncFSWAL.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AsyncFSWAL.java @@ -45,8 +45,6 @@ import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; import java.util.function.Supplier; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -64,7 +62,8 @@ import org.apache.hadoop.hdfs.protocol.DatanodeInfo; import org.apache.hadoop.ipc.RemoteException; import org.apache.htrace.core.TraceScope; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder; import org.apache.hadoop.hbase.shaded.io.netty.channel.Channel; import org.apache.hadoop.hbase.shaded.io.netty.channel.EventLoop; @@ -131,7 +130,7 @@ import 
org.apache.hadoop.hbase.shaded.io.netty.util.concurrent.SingleThreadEvent @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG) public class AsyncFSWAL extends AbstractFSWAL { - private static final Log LOG = LogFactory.getLog(AsyncFSWAL.class); + private static final Logger LOG = LoggerFactory.getLogger(AsyncFSWAL.class); private static final Comparator SEQ_COMPARATOR = (o1, o2) -> { int c = Long.compare(o1.getTxid(), o2.getTxid()); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AsyncProtobufLogWriter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AsyncProtobufLogWriter.java index 454928bb843..aa585e338c4 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AsyncProtobufLogWriter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AsyncProtobufLogWriter.java @@ -25,8 +25,6 @@ import java.util.concurrent.CompletableFuture; import java.util.concurrent.ExecutionException; import java.util.function.Consumer; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; @@ -38,7 +36,8 @@ import org.apache.hadoop.hbase.wal.AsyncFSWALProvider; import org.apache.hadoop.hbase.wal.WAL.Entry; import org.apache.hadoop.hbase.wal.WALKeyImpl; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.base.Throwables; import org.apache.hadoop.hbase.shaded.io.netty.channel.Channel; import org.apache.hadoop.hbase.shaded.io.netty.channel.EventLoopGroup; @@ -52,7 +51,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALTrailer; public class AsyncProtobufLogWriter extends AbstractProtobufLogWriter implements AsyncFSWALProvider.AsyncWriter { - private static final Log LOG = LogFactory.getLog(AsyncProtobufLogWriter.class); + private static final Logger LOG = LoggerFactory.getLogger(AsyncProtobufLogWriter.class); private final EventLoopGroup eventLoopGroup; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java index fd9d6c122ac..c0f454e0b93 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java @@ -35,8 +35,6 @@ import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; @@ -58,7 +56,8 @@ import org.apache.hadoop.hdfs.client.HdfsDataOutputStream; import org.apache.hadoop.hdfs.protocol.DatanodeInfo; import org.apache.htrace.core.TraceScope; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** @@ -101,7 +100,7 @@ public class FSHLog extends AbstractFSWAL { // syncs and appends have completed -- so the log roller can swap the WAL out under it. // // We use ring buffer sequence as txid of FSWALEntry and SyncFuture. 
- private static final Log LOG = LogFactory.getLog(FSHLog.class); + private static final Logger LOG = LoggerFactory.getLogger(FSHLog.class); /** * The nexus at which all incoming handlers meet. Does appends and sync with an ordering. Appends @@ -162,13 +161,13 @@ public class FSHLog extends AbstractFSWAL { @Override public void handleOnStartException(Throwable ex) { - LOG.error(ex); + LOG.error(ex.toString(), ex); throw new RuntimeException(ex); } @Override public void handleOnShutdownException(Throwable ex) { - LOG.error(ex); + LOG.error(ex.toString(), ex); throw new RuntimeException(ex); } } @@ -634,6 +633,7 @@ public class FSHLog extends AbstractFSWAL { /** * @return true if number of replicas for the WAL is lower than threshold */ + @Override protected boolean doCheckLogLowReplication() { boolean logRollNeeded = false; // if the number of replicas in HDFS has fallen below the configured diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/MetricsWAL.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/MetricsWAL.java index ba9c0e6a695..b19f93b333c 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/MetricsWAL.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/MetricsWAL.java @@ -23,9 +23,9 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.wal.WALEdit; import org.apache.hadoop.hbase.wal.WALKey; import org.apache.hadoop.hbase.CompatibilitySingletonFactory; @@ -37,7 +37,7 @@ import org.apache.hadoop.util.StringUtils; */ @InterfaceAudience.Private public class MetricsWAL implements WALActionsListener { - private static final Log LOG = LogFactory.getLog(MetricsWAL.class); + private static final Logger LOG = LoggerFactory.getLogger(MetricsWAL.class); private final MetricsWALSource source; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java index c199484271f..6017a182f23 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java @@ -26,9 +26,9 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.FileSystem; @@ -61,7 +61,7 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferE @InterfaceAudience.LimitedPrivate({HBaseInterfaceAudience.COPROC, HBaseInterfaceAudience.PHOENIX, HBaseInterfaceAudience.CONFIG}) public class ProtobufLogReader extends ReaderBase { - private static final Log LOG = LogFactory.getLog(ProtobufLogReader.class); + private static final Logger LOG = LoggerFactory.getLogger(ProtobufLogReader.class); // public for WALFactory until we move everything to o.a.h.h.wal @InterfaceAudience.Private public static final byte[] PB_WAL_MAGIC = Bytes.toBytes("PWAL"); 
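Two of the changes in the WAL classes above go beyond an import swap. SLF4J has no fatal() level, so AbstractFSWAL's LOG.fatal(...) calls become LOG.error(HBaseMarkers.FATAL, ...) (hence the new org.apache.hadoop.hbase.log.HBaseMarkers import), and SLF4J's error()/warn() have no Throwable-only overload, so FSHLog's LOG.error(ex) becomes LOG.error(ex.toString(), ex); the same pattern recurs in ProtobufLogWriter below. A self-contained sketch of both patterns, using org.slf4j.MarkerFactory in place of HBase's own marker class:

// Minimal sketch of the two non-mechanical adjustments in the WAL hunks above;
// the Marker here comes from org.slf4j directly, whereas the patch uses HBaseMarkers.FATAL.
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.Marker;
import org.slf4j.MarkerFactory;

public class WalLoggingExample {
  private static final Logger LOG = LoggerFactory.getLogger(WalLoggingExample.class);
  private static final Marker FATAL = MarkerFactory.getMarker("FATAL");

  void reportFatal(String msg) {
    // commons-logging offered LOG.fatal(msg); SLF4J has no FATAL level, so the
    // patch logs at ERROR tagged with a FATAL marker instead.
    LOG.error(FATAL, msg);
  }

  void reportFailure(Throwable ex) {
    // commons-logging allowed LOG.error(ex); SLF4J has no Throwable-only overload,
    // so the message and the stack trace are passed separately.
    LOG.error(ex.toString(), ex);
  }
}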
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogWriter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogWriter.java index 7a135c9fe27..aeb2c19c25b 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogWriter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogWriter.java @@ -21,15 +21,13 @@ package org.apache.hadoop.hbase.regionserver.wal; import java.io.IOException; import java.io.OutputStream; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.wal.WALKeyImpl; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader; import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALTrailer; import org.apache.hadoop.hbase.util.CommonFSUtils; @@ -44,14 +42,14 @@ import org.apache.hadoop.hbase.wal.WAL.Entry; public class ProtobufLogWriter extends AbstractProtobufLogWriter implements FSHLogProvider.Writer { - private static final Log LOG = LogFactory.getLog(ProtobufLogWriter.class); + private static final Logger LOG = LoggerFactory.getLogger(ProtobufLogWriter.class); protected FSDataOutputStream output; @Override public void append(Entry entry) throws IOException { entry.setCompressionContext(compressionContext); - ((WALKeyImpl)entry.getKey()).getBuilder(compressor). + entry.getKey().getBuilder(compressor). setFollowingKvCount(entry.getEdit().size()).build().writeDelimitedTo(output); for (Cell cell : entry.getEdit().getCells()) { // cellEncoder must assume little about the stream, since we write PB and cells in turn. 
@@ -68,7 +66,7 @@ public class ProtobufLogWriter extends AbstractProtobufLogWriter this.output.close(); } catch (NullPointerException npe) { // Can get a NPE coming up from down in DFSClient$DFSOutputStream#close - LOG.warn(npe); + LOG.warn(npe.toString(), npe); } this.output = null; } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ReaderBase.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ReaderBase.java index 9a6bfd39583..f0573587a63 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ReaderBase.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ReaderBase.java @@ -21,8 +21,6 @@ package org.apache.hadoop.hbase.regionserver.wal; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FileSystem; @@ -34,10 +32,12 @@ import org.apache.hadoop.hbase.util.FSUtils; import org.apache.hadoop.hbase.wal.AbstractFSWALProvider; import org.apache.hadoop.hbase.wal.WAL.Entry; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @InterfaceAudience.LimitedPrivate({HBaseInterfaceAudience.COPROC, HBaseInterfaceAudience.PHOENIX}) public abstract class ReaderBase implements AbstractFSWALProvider.Reader { - private static final Log LOG = LogFactory.getLog(ReaderBase.class); + private static final Logger LOG = LoggerFactory.getLogger(ReaderBase.class); protected Configuration conf; protected FileSystem fs; protected Path path; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SecureProtobufLogReader.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SecureProtobufLogReader.java index 2765f94995b..b1f17ad4ea0 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SecureProtobufLogReader.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SecureProtobufLogReader.java @@ -24,9 +24,9 @@ import java.security.KeyException; import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.HConstants; @@ -41,7 +41,7 @@ import org.apache.hadoop.hbase.util.EncryptionTest; @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG) public class SecureProtobufLogReader extends ProtobufLogReader { - private static final Log LOG = LogFactory.getLog(SecureProtobufLogReader.class); + private static final Logger LOG = LoggerFactory.getLogger(SecureProtobufLogReader.class); private Decryptor decryptor = null; private static List writerClsNames = new ArrayList<>(); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SequenceIdAccounting.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SequenceIdAccounting.java index 61586548ab3..f21c1f06313 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SequenceIdAccounting.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SequenceIdAccounting.java @@ -30,11 +30,11 @@ import java.util.Set; import 
java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.ImmutableByteArray; @@ -52,7 +52,7 @@ import org.apache.hadoop.hbase.util.ImmutableByteArray; @InterfaceAudience.Private class SequenceIdAccounting { - private static final Log LOG = LogFactory.getLog(SequenceIdAccounting.class); + private static final Logger LOG = LoggerFactory.getLogger(SequenceIdAccounting.class); /** * This lock ties all operations on {@link SequenceIdAccounting#flushingSequenceIds} and * {@link #lowestUnflushedSequenceIds} Maps. {@link #lowestUnflushedSequenceIds} has the diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCoprocessorHost.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCoprocessorHost.java index 0edd5d4e19d..7b6182e6b60 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCoprocessorHost.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCoprocessorHost.java @@ -22,8 +22,6 @@ package org.apache.hadoop.hbase.regionserver.wal; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.client.RegionInfo; @@ -38,6 +36,8 @@ import org.apache.hadoop.hbase.wal.WAL; import org.apache.hadoop.hbase.wal.WALEdit; import org.apache.hadoop.hbase.wal.WALKey; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Implements the coprocessor environment and runtime support for coprocessors @@ -46,7 +46,7 @@ import org.apache.yetus.audience.InterfaceAudience; @InterfaceAudience.Private public class WALCoprocessorHost extends CoprocessorHost { - private static final Log LOG = LogFactory.getLog(WALCoprocessorHost.class); + private static final Logger LOG = LoggerFactory.getLogger(WALCoprocessorHost.class); /** * Encapsulation of the environment of each coprocessor diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALUtil.java index 518ee8fbe54..c3b67faf95b 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALUtil.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALUtil.java @@ -22,15 +22,14 @@ package org.apache.hadoop.hbase.regionserver.wal; import java.io.IOException; import java.util.NavigableMap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.client.RegionInfo; import org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl; import org.apache.hadoop.hbase.wal.WAL; import org.apache.hadoop.hbase.wal.WALEdit; import org.apache.hadoop.hbase.wal.WALKeyImpl; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.protobuf.TextFormat; import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos; import 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor; @@ -44,7 +43,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDe */ @InterfaceAudience.Private public class WALUtil { - private static final Log LOG = LogFactory.getLog(WALUtil.class); + private static final Logger LOG = LoggerFactory.getLogger(WALUtil.class); private WALUtil() { // Shut down construction of this class. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/BaseReplicationEndpoint.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/BaseReplicationEndpoint.java index a7637b14620..c390d0967da 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/BaseReplicationEndpoint.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/BaseReplicationEndpoint.java @@ -21,9 +21,9 @@ package org.apache.hadoop.hbase.replication; import java.io.IOException; import java.util.ArrayList; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.AbstractService; @@ -37,7 +37,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.Abstract public abstract class BaseReplicationEndpoint extends AbstractService implements ReplicationEndpoint { - private static final Log LOG = LogFactory.getLog(BaseReplicationEndpoint.class); + private static final Logger LOG = LoggerFactory.getLogger(BaseReplicationEndpoint.class); public static final String REPLICATION_WALENTRYFILTER_CONFIG_KEY = "hbase.replication.source.custom.walentryfilters"; protected Context ctx; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/BulkLoadCellFilter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/BulkLoadCellFilter.java index 5f465ce6c30..d5506b17d8c 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/BulkLoadCellFilter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/BulkLoadCellFilter.java @@ -22,21 +22,20 @@ import java.util.ArrayList; import java.util.Iterator; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellBuilderType; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.ExtendedCellBuilder; import org.apache.hadoop.hbase.ExtendedCellBuilderFactory; import org.apache.hadoop.hbase.wal.WALEdit; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.base.Predicate; import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescriptor; import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor; public class BulkLoadCellFilter { - private static final Log LOG = LogFactory.getLog(BulkLoadCellFilter.class); + private static final Logger LOG = LoggerFactory.getLogger(BulkLoadCellFilter.class); private final ExtendedCellBuilder cellBuilder = ExtendedCellBuilderFactory.create(CellBuilderType.SHALLOW_COPY); /** diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/HBaseReplicationEndpoint.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/HBaseReplicationEndpoint.java index 4985b82ae80..bd5c529092d 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/HBaseReplicationEndpoint.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/HBaseReplicationEndpoint.java @@ -24,8 +24,6 @@ import java.util.Collections; import java.util.List; import java.util.UUID; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.zookeeper.ZKListener; import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.Abortable; @@ -37,6 +35,8 @@ import org.apache.zookeeper.KeeperException; import org.apache.zookeeper.KeeperException.AuthFailedException; import org.apache.zookeeper.KeeperException.ConnectionLossException; import org.apache.zookeeper.KeeperException.SessionExpiredException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A {@link BaseReplicationEndpoint} for replication endpoints whose @@ -48,7 +48,7 @@ import org.apache.zookeeper.KeeperException.SessionExpiredException; public abstract class HBaseReplicationEndpoint extends BaseReplicationEndpoint implements Abortable { - private static final Log LOG = LogFactory.getLog(HBaseReplicationEndpoint.class); + private static final Logger LOG = LoggerFactory.getLogger(HBaseReplicationEndpoint.class); private ZKWatcher zkw = null; // FindBugs: MT_CORRECTNESS diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/NamespaceTableCfWALEntryFilter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/NamespaceTableCfWALEntryFilter.java index 5068cce8ef6..b540416fec5 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/NamespaceTableCfWALEntryFilter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/NamespaceTableCfWALEntryFilter.java @@ -22,12 +22,12 @@ import java.util.List; import java.util.Map; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.TableName; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.wal.WALEdit; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.wal.WAL.Entry; @@ -47,7 +47,7 @@ import org.apache.hadoop.hbase.wal.WAL.Entry; @InterfaceAudience.Private public class NamespaceTableCfWALEntryFilter implements WALEntryFilter, WALCellFilter { - private static final Log LOG = LogFactory.getLog(NamespaceTableCfWALEntryFilter.class); + private static final Logger LOG = LoggerFactory.getLogger(NamespaceTableCfWALEntryFilter.class); private final ReplicationPeer peer; private BulkLoadCellFilter bulkLoadFilter = new BulkLoadCellFilter(); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/master/ReplicationHFileCleaner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/master/ReplicationHFileCleaner.java index 5972734f6dd..4e9d67a036f 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/master/ReplicationHFileCleaner.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/master/ReplicationHFileCleaner.java @@ -21,8 +21,6 @@ import java.util.Collections; import java.util.List; import java.util.Set; -import org.apache.commons.logging.Log; -import 
org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.hbase.Abortable; @@ -36,6 +34,8 @@ import org.apache.hadoop.hbase.replication.ReplicationQueuesClient; import org.apache.hadoop.hbase.replication.ReplicationQueuesClientArguments; import org.apache.hadoop.hbase.zookeeper.ZKWatcher; import org.apache.zookeeper.KeeperException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Implementation of a file cleaner that checks if a hfile is still scheduled for replication before @@ -43,7 +43,7 @@ import org.apache.zookeeper.KeeperException; */ @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG) public class ReplicationHFileCleaner extends BaseHFileCleanerDelegate { - private static final Log LOG = LogFactory.getLog(ReplicationHFileCleaner.class); + private static final Logger LOG = LoggerFactory.getLogger(ReplicationHFileCleaner.class); private ZKWatcher zkw; private ReplicationQueuesClient rqc; private boolean stopped = false; @@ -192,9 +192,7 @@ public class ReplicationHFileCleaner extends BaseHFileCleanerDelegate { @Override public void abort(String why, Throwable e) { LOG.warn("ReplicationHFileCleaner received abort, ignoring. Reason: " + why); - if (LOG.isDebugEnabled()) { - LOG.debug(e); - } + LOG.debug(e.toString(), e); } @Override diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/master/ReplicationLogCleaner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/master/ReplicationLogCleaner.java index 57ed8427ac8..773e10e0e3b 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/master/ReplicationLogCleaner.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/master/ReplicationLogCleaner.java @@ -18,11 +18,10 @@ */ package org.apache.hadoop.hbase.replication.master; -import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.hbase.zookeeper.ZKWatcher; -import org.apache.yetus.audience.InterfaceAudience; +import java.io.IOException; +import java.util.Collections; +import java.util.Set; + import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.hbase.Abortable; @@ -31,15 +30,16 @@ import org.apache.hadoop.hbase.master.cleaner.BaseLogCleanerDelegate; import org.apache.hadoop.hbase.replication.ReplicationFactory; import org.apache.hadoop.hbase.replication.ReplicationQueuesClient; import org.apache.hadoop.hbase.replication.ReplicationQueuesClientArguments; +import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; +import org.apache.hadoop.hbase.zookeeper.ZKWatcher; +import org.apache.yetus.audience.InterfaceAudience; +import org.apache.zookeeper.KeeperException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; -import java.io.IOException; -import java.util.Collections; -import java.util.Set; - +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.common.base.Predicate; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterables; -import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; -import org.apache.zookeeper.KeeperException; /** * Implementation of a log cleaner that checks if a log is still scheduled for @@ -47,7 +47,7 @@ import org.apache.zookeeper.KeeperException; */ 
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG) public class ReplicationLogCleaner extends BaseLogCleanerDelegate { - private static final Log LOG = LogFactory.getLog(ReplicationLogCleaner.class); + private static final Logger LOG = LoggerFactory.getLogger(ReplicationLogCleaner.class); private ZKWatcher zkw; private ReplicationQueuesClient replicationQueues; private boolean stopped = false; @@ -140,9 +140,7 @@ public class ReplicationLogCleaner extends BaseLogCleanerDelegate { @Override public void abort(String why, Throwable e) { LOG.warn("ReplicationLogCleaner received abort, ignoring. Reason: " + why); - if (LOG.isDebugEnabled()) { - LOG.debug(e); - } + LOG.debug(e.toString(), e); } @Override diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/master/ReplicationPeerConfigUpgrader.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/master/ReplicationPeerConfigUpgrader.java index 5c8fba39709..ea5509f64e9 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/master/ReplicationPeerConfigUpgrader.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/master/ReplicationPeerConfigUpgrader.java @@ -21,8 +21,6 @@ package org.apache.hadoop.hbase.replication.master; import java.io.IOException; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Abortable; import org.apache.hadoop.hbase.HBaseConfiguration; @@ -38,7 +36,8 @@ import org.apache.hadoop.hbase.zookeeper.ZKWatcher; import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceStability; import org.apache.zookeeper.KeeperException; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos; /** @@ -49,7 +48,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos; @InterfaceStability.Unstable public class ReplicationPeerConfigUpgrader extends ReplicationStateZKBase { - private static final Log LOG = LogFactory.getLog(ReplicationPeerConfigUpgrader.class); + private static final Logger LOG = LoggerFactory.getLogger(ReplicationPeerConfigUpgrader.class); public ReplicationPeerConfigUpgrader(ZKWatcher zookeeper, Configuration conf, Abortable abortable) { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/DefaultSourceFSConfigurationProvider.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/DefaultSourceFSConfigurationProvider.java index 8cfc3ce9b7a..b28c58fc28b 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/DefaultSourceFSConfigurationProvider.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/DefaultSourceFSConfigurationProvider.java @@ -16,14 +16,14 @@ import java.net.URL; import java.util.HashMap; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HConstants; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This will load all the xml configuration files for the source cluster replication ID from @@ -31,7 +31,9 @@ 
import org.apache.yetus.audience.InterfaceAudience; */ @InterfaceAudience.Private public class DefaultSourceFSConfigurationProvider implements SourceFSConfigurationProvider { - private static final Log LOG = LogFactory.getLog(DefaultSourceFSConfigurationProvider.class); + private static final Logger LOG = + LoggerFactory.getLogger(DefaultSourceFSConfigurationProvider.class); + // Map containing all the source clusters configurations against their replication cluster id private Map sourceClustersConfs = new HashMap<>(); private static final String XML = ".xml"; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/DumpReplicationQueues.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/DumpReplicationQueues.java index ff5e5c7e12a..93b86494ded 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/DumpReplicationQueues.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/DumpReplicationQueues.java @@ -28,8 +28,6 @@ import java.util.Queue; import java.util.Set; import java.util.stream.Collectors; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.fs.FileStatus; @@ -59,6 +57,8 @@ import org.apache.hadoop.hbase.zookeeper.ZKWatcher; import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; import org.apache.zookeeper.KeeperException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.AtomicLongMap; /** @@ -71,7 +71,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.AtomicLo public class DumpReplicationQueues extends Configured implements Tool { - private static final Log LOG = LogFactory.getLog(DumpReplicationQueues.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(DumpReplicationQueues.class.getName()); private List deadRegionServers; private List deletedQueues; @@ -417,7 +417,7 @@ public class DumpReplicationQueues extends Configured implements Tool { public void abort(String why, Throwable e) { LOG.warn("DumpReplicationQueue received abort, ignoring. 
Reason: " + why); if (LOG.isDebugEnabled()) { - LOG.debug(e); + LOG.debug(e.toString(), e); } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/HBaseInterClusterReplicationEndpoint.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/HBaseInterClusterReplicationEndpoint.java index c1ed64413db..16fd0c3d7f8 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/HBaseInterClusterReplicationEndpoint.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/HBaseInterClusterReplicationEndpoint.java @@ -40,8 +40,6 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; import org.apache.commons.lang3.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Abortable; @@ -50,6 +48,8 @@ import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.TableNotFoundException; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.client.ClusterConnection; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; @@ -77,8 +77,8 @@ import org.apache.hadoop.ipc.RemoteException; */ @InterfaceAudience.Private public class HBaseInterClusterReplicationEndpoint extends HBaseReplicationEndpoint { - - private static final Log LOG = LogFactory.getLog(HBaseInterClusterReplicationEndpoint.class); + private static final Logger LOG = + LoggerFactory.getLogger(HBaseInterClusterReplicationEndpoint.class); private static final long DEFAULT_MAX_TERMINATION_WAIT_MULTIPLIER = 2; @@ -144,7 +144,7 @@ public class HBaseInterClusterReplicationEndpoint extends HBaseReplicationEndpoi // Set the size limit for replication RPCs to 95% of the max request size. // We could do with less slop if we have an accurate estimate of encoded size. Being // conservative for now. 
- this.replicationRpcLimit = (int)(0.95 * (double)conf.getLong(RpcServer.MAX_REQUEST_SIZE, + this.replicationRpcLimit = (int)(0.95 * conf.getLong(RpcServer.MAX_REQUEST_SIZE, RpcServer.DEFAULT_MAX_REQUEST_SIZE)); this.dropOnDeletedTables = this.conf.getBoolean(HConstants.REPLICATION_DROP_ON_DELETED_TABLE_KEY, false); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/HFileReplicator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/HFileReplicator.java index eb29ac4dc95..a2cd03e7e8d 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/HFileReplicator.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/HFileReplicator.java @@ -31,8 +31,6 @@ import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileUtil; @@ -42,6 +40,8 @@ import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.TableName; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.RegionLocator; import org.apache.hadoop.hbase.client.Table; @@ -70,7 +70,7 @@ public class HFileReplicator { "hbase.replication.bulkload.copy.hfiles.perthread"; public static final int REPLICATION_BULKLOAD_COPY_HFILES_PERTHREAD_DEFAULT = 10; - private static final Log LOG = LogFactory.getLog(HFileReplicator.class); + private static final Logger LOG = LoggerFactory.getLogger(HFileReplicator.class); private static final String UNDERSCORE = "_"; private final static FsPermission PERM_ALL_ACCESS = FsPermission.valueOf("-rwxrwxrwx"); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsSource.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsSource.java index 09ddbd40bd6..9ca1c844319 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsSource.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsSource.java @@ -21,9 +21,9 @@ package org.apache.hadoop.hbase.replication.regionserver; import java.util.HashMap; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.CompatibilitySingletonFactory; import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.metrics.BaseSource; @@ -36,7 +36,7 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.REPLICATION) public class MetricsSource implements BaseSource { - private static final Log LOG = LogFactory.getLog(MetricsSource.class); + private static final Logger LOG = LoggerFactory.getLogger(MetricsSource.class); // tracks last shipped timestamp for each wal group private Map lastTimeStamps = new HashMap<>(); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RecoveredReplicationSource.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RecoveredReplicationSource.java index cabf85a7f83..bd191e33979 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RecoveredReplicationSource.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RecoveredReplicationSource.java @@ -23,8 +23,6 @@ import java.util.List; import java.util.UUID; import java.util.concurrent.PriorityBlockingQueue; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -32,6 +30,8 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Server; import org.apache.hadoop.hbase.ServerName; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.replication.ReplicationEndpoint; import org.apache.hadoop.hbase.replication.ReplicationPeers; import org.apache.hadoop.hbase.replication.ReplicationQueues; @@ -46,7 +46,7 @@ import org.apache.hadoop.hbase.wal.AbstractFSWALProvider; @InterfaceAudience.Private public class RecoveredReplicationSource extends ReplicationSource { - private static final Log LOG = LogFactory.getLog(RecoveredReplicationSource.class); + private static final Logger LOG = LoggerFactory.getLogger(RecoveredReplicationSource.class); private String actualPeerId; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RecoveredReplicationSourceShipper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RecoveredReplicationSourceShipper.java index af84868d61f..630b90b68e5 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RecoveredReplicationSourceShipper.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RecoveredReplicationSourceShipper.java @@ -21,11 +21,11 @@ package org.apache.hadoop.hbase.replication.regionserver; import java.io.IOException; import java.util.concurrent.PriorityBlockingQueue; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.replication.ReplicationException; import org.apache.hadoop.hbase.replication.ReplicationQueues; import org.apache.hadoop.hbase.replication.regionserver.ReplicationSourceWALReader.WALEntryBatch; @@ -36,8 +36,9 @@ import org.apache.hadoop.hbase.util.Threads; */ @InterfaceAudience.Private public class RecoveredReplicationSourceShipper extends ReplicationSourceShipper { + private static final Logger LOG = + LoggerFactory.getLogger(RecoveredReplicationSourceShipper.class); - private static final Log LOG = LogFactory.getLog(RecoveredReplicationSourceShipper.class); protected final RecoveredReplicationSource source; private final ReplicationQueues replicationQueues; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RecoveredReplicationSourceWALReader.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RecoveredReplicationSourceWALReader.java index edd1b2ad688..0af3f5cacca 100644 --- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RecoveredReplicationSourceWALReader.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RecoveredReplicationSourceWALReader.java @@ -20,13 +20,13 @@ package org.apache.hadoop.hbase.replication.regionserver; import java.util.concurrent.PriorityBlockingQueue; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceStability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.replication.WALEntryFilter; /** @@ -35,7 +35,8 @@ import org.apache.hadoop.hbase.replication.WALEntryFilter; @InterfaceAudience.Private @InterfaceStability.Evolving public class RecoveredReplicationSourceWALReader extends ReplicationSourceWALReader { - private static final Log LOG = LogFactory.getLog(RecoveredReplicationSourceWALReader.class); + private static final Logger LOG = + LoggerFactory.getLogger(RecoveredReplicationSourceWALReader.class); public RecoveredReplicationSourceWALReader(FileSystem fs, Configuration conf, PriorityBlockingQueue logQueue, long startPosition, WALEntryFilter filter, @@ -43,6 +44,7 @@ public class RecoveredReplicationSourceWALReader extends ReplicationSourceWALRea super(fs, conf, logQueue, startPosition, filter, source); } + @Override protected void handleEmptyWALEntryBatch(WALEntryBatch batch, Path currentPath) throws InterruptedException { LOG.trace("Didn't read any new entries from WAL"); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RegionReplicaReplicationEndpoint.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RegionReplicaReplicationEndpoint.java index 3d39146cacf..b9f2d0df6b1 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RegionReplicaReplicationEndpoint.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RegionReplicaReplicationEndpoint.java @@ -32,8 +32,6 @@ import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.CellScanner; @@ -70,7 +68,8 @@ import org.apache.hadoop.hbase.wal.WALSplitter.RegionEntryBuffer; import org.apache.hadoop.hbase.wal.WALSplitter.SinkWriter; import org.apache.hadoop.util.StringUtils; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.cache.Cache; import org.apache.hadoop.hbase.shaded.com.google.common.cache.CacheBuilder; import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos; @@ -84,7 +83,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ReplicateWA @InterfaceAudience.Private public class RegionReplicaReplicationEndpoint extends HBaseReplicationEndpoint { - private static final Log LOG = LogFactory.getLog(RegionReplicaReplicationEndpoint.class); + private static final Logger LOG = LoggerFactory.getLogger(RegionReplicaReplicationEndpoint.class); // Can be configured differently than 
hbase.client.retries.number private static String CLIENT_RETRIES_NUMBER @@ -161,8 +160,7 @@ public class RegionReplicaReplicationEndpoint extends HBaseReplicationEndpoint { try { outputSink.finishWritingAndClose(); } catch (IOException ex) { - LOG.warn("Got exception while trying to close OutputSink"); - LOG.warn(ex); + LOG.warn("Got exception while trying to close OutputSink", ex); } } if (this.pool != null) { @@ -583,6 +581,7 @@ public class RegionReplicaReplicationEndpoint extends HBaseReplicationEndpoint { this.initialEncodedRegionName = regionInfo.getEncodedNameAsBytes(); } + @Override public ReplicateWALEntryResponse call(HBaseRpcController controller) throws Exception { // Check whether we should still replay this entry. If the regions are changed, or the // entry is not coming form the primary region, filter it out because we do not need it. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/Replication.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/Replication.java index 2a2df60303b..d8212e95aee 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/Replication.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/Replication.java @@ -29,8 +29,6 @@ import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.wal.WALKey; import org.apache.hadoop.hbase.wal.WALKeyImpl; import org.apache.yetus.audience.InterfaceAudience; @@ -62,7 +60,8 @@ import org.apache.hadoop.hbase.replication.master.ReplicationHFileCleaner; import org.apache.hadoop.hbase.replication.master.ReplicationLogCleaner; import org.apache.hadoop.hbase.zookeeper.ZKClusterId; import org.apache.zookeeper.KeeperException; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder; /** @@ -71,8 +70,8 @@ import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFa @InterfaceAudience.Private public class Replication implements ReplicationSourceService, ReplicationSinkService, WALActionsListener { - private static final Log LOG = - LogFactory.getLog(Replication.class); + private static final Logger LOG = + LoggerFactory.getLogger(Replication.class); private boolean replicationForBulkLoadData; private ReplicationSourceManager replicationManager; private ReplicationQueues replicationQueues; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationObserver.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationObserver.java index e72f6e2a091..ec478d5e09b 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationObserver.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationObserver.java @@ -23,8 +23,6 @@ import java.io.IOException; import java.util.List; import java.util.Optional; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.coprocessor.CoreCoprocessor; @@ -32,6 +30,8 @@ import org.apache.hadoop.hbase.coprocessor.HasRegionServerServices; import 
org.apache.hadoop.hbase.coprocessor.RegionCoprocessor; import org.apache.hadoop.hbase.regionserver.RegionServerServices; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.coprocessor.ObserverContext; @@ -48,7 +48,7 @@ import javax.validation.constraints.Null; @CoreCoprocessor @InterfaceAudience.Private public class ReplicationObserver implements RegionCoprocessor, RegionObserver { - private static final Log LOG = LogFactory.getLog(ReplicationObserver.class); + private static final Logger LOG = LoggerFactory.getLogger(ReplicationObserver.class); @Override public Optional getRegionObserver() { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSink.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSink.java index 21947966598..57e185a7a26 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSink.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSink.java @@ -31,8 +31,6 @@ import java.util.UUID; import java.util.concurrent.atomic.AtomicLong; import org.apache.commons.lang3.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; @@ -44,6 +42,8 @@ import org.apache.hadoop.hbase.Stoppable; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.TableNotFoundException; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; import org.apache.hadoop.hbase.client.Delete; @@ -78,7 +78,7 @@ import org.apache.hadoop.hbase.util.Pair; @InterfaceAudience.Private public class ReplicationSink { - private static final Log LOG = LogFactory.getLog(ReplicationSink.class); + private static final Logger LOG = LoggerFactory.getLogger(ReplicationSink.class); private final Configuration conf; // Volatile because of note in here -- look for double-checked locking: // http://www.oracle.com/technetwork/articles/javase/bloch-effective-08-qa-140880.html diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSinkManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSinkManager.java index eb882f3a499..58685c2495e 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSinkManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSinkManager.java @@ -17,25 +17,24 @@ */ package org.apache.hadoop.hbase.replication.regionserver; -import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; -import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; -import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps; - import java.io.IOException; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Random; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import 
org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.client.ClusterConnection; import org.apache.hadoop.hbase.client.Connection; -import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService; import org.apache.hadoop.hbase.replication.HBaseReplicationEndpoint; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps; +import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService; /** * Maintains a collection of peers to replicate to, and randomly selects a @@ -44,7 +43,7 @@ import org.apache.hadoop.hbase.replication.HBaseReplicationEndpoint; */ public class ReplicationSinkManager { - private static final Log LOG = LogFactory.getLog(ReplicationSinkManager.class); + private static final Logger LOG = LoggerFactory.getLogger(ReplicationSinkManager.class); /** * Default maximum number of times a replication sink can be reported as bad before diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.java index ea6c6d44aff..f4f35ae0c57 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.java @@ -33,8 +33,6 @@ import java.util.concurrent.TimeoutException; import java.util.concurrent.atomic.AtomicLong; import org.apache.commons.lang3.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -45,6 +43,8 @@ import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.Stoppable; import org.apache.hadoop.hbase.TableName; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.regionserver.RSRpcServices; import org.apache.hadoop.hbase.replication.ChainWALEntryFilter; import org.apache.hadoop.hbase.replication.ClusterMarkingEntryFilter; @@ -78,7 +78,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; @InterfaceAudience.Private public class ReplicationSource extends Thread implements ReplicationSourceInterface { - private static final Log LOG = LogFactory.getLog(ReplicationSource.class); + private static final Logger LOG = LoggerFactory.getLogger(ReplicationSource.class); // Queues of logs to process, entry in format of walGroupId->queue, // each presents a queue for one wal group private Map> queues = new HashMap<>(); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceFactory.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceFactory.java index e97da24a563..865a2028708 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceFactory.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceFactory.java @@ -18,10 +18,10 @@ */ package org.apache.hadoop.hbase.replication.regionserver; -import org.apache.commons.logging.Log; -import 
org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.replication.ReplicationQueueInfo; /** @@ -30,7 +30,7 @@ import org.apache.hadoop.hbase.replication.ReplicationQueueInfo; @InterfaceAudience.Private public class ReplicationSourceFactory { - private static final Log LOG = LogFactory.getLog(ReplicationSourceFactory.class); + private static final Logger LOG = LoggerFactory.getLogger(ReplicationSourceFactory.class); static ReplicationSourceInterface create(Configuration conf, String peerId) { ReplicationQueueInfo replicationQueueInfo = new ReplicationQueueInfo(peerId); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.java index c518ece18fb..07c53e1c9e7 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.java @@ -40,8 +40,7 @@ import java.util.concurrent.ThreadLocalRandom; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; + import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -67,7 +66,8 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.hbase.wal.AbstractFSWALProvider; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder; @@ -89,8 +89,8 @@ import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFa */ @InterfaceAudience.Private public class ReplicationSourceManager implements ReplicationListener { - private static final Log LOG = - LogFactory.getLog(ReplicationSourceManager.class); + private static final Logger LOG = + LoggerFactory.getLogger(ReplicationSourceManager.class); // List of all the sources that read this RS's logs private final List sources; // List of all the sources we got from died RSs diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceShipper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceShipper.java index 9dfe686d17c..1e1dcc8c15c 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceShipper.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceShipper.java @@ -24,14 +24,14 @@ import java.util.Map; import java.util.concurrent.PriorityBlockingQueue; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.MetaTableAccessor; import 
org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.wal.WALEdit; import org.apache.hadoop.hbase.replication.ReplicationEndpoint; import org.apache.hadoop.hbase.replication.regionserver.ReplicationSourceWALReader.WALEntryBatch; @@ -52,7 +52,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.cache.LoadingCache; */ @InterfaceAudience.Private public class ReplicationSourceShipper extends Thread { - private static final Log LOG = LogFactory.getLog(ReplicationSourceShipper.class); + private static final Logger LOG = LoggerFactory.getLogger(ReplicationSourceShipper.class); // Hold the state of a replication worker thread public enum WorkerState { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceWALReader.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceWALReader.java index bbcaaa4d73b..1ec797fb141 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceWALReader.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceWALReader.java @@ -29,8 +29,6 @@ import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.PriorityBlockingQueue; import java.util.concurrent.atomic.AtomicLong; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -45,7 +43,8 @@ import org.apache.hadoop.hbase.wal.WAL.Entry; import org.apache.hadoop.hbase.wal.WALEdit; import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceStability; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos; import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescriptor; import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor; @@ -57,7 +56,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescript @InterfaceAudience.Private @InterfaceStability.Evolving public class ReplicationSourceWALReader extends Thread { - private static final Log LOG = LogFactory.getLog(ReplicationSourceWALReader.class); + private static final Logger LOG = LoggerFactory.getLogger(ReplicationSourceWALReader.class); private final PriorityBlockingQueue logQueue; private final FileSystem fs; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.java index db3b1fc037f..21b8ac5c8c3 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.java @@ -19,8 +19,6 @@ package org.apache.hadoop.hbase.replication.regionserver; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.fs.FileSystem; @@ -39,6 +37,8 @@ import org.apache.hadoop.hbase.zookeeper.MetaTableLocator; import org.apache.hadoop.hbase.zookeeper.ZKWatcher; import org.apache.hadoop.util.Tool; import 
org.apache.hadoop.util.ToolRunner; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * In a scenario of Replication based Disaster/Recovery, when hbase @@ -51,7 +51,7 @@ import org.apache.hadoop.util.ToolRunner; public class ReplicationSyncUp extends Configured implements Tool { - private static final Log LOG = LogFactory.getLog(ReplicationSyncUp.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(ReplicationSyncUp.class.getName()); private static Configuration conf; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/WALEntryStream.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/WALEntryStream.java index 6277d24c5bc..7c83c0c1fac 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/WALEntryStream.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/WALEntryStream.java @@ -25,8 +25,6 @@ import java.util.NoSuchElementException; import java.util.OptionalLong; import java.util.concurrent.PriorityBlockingQueue; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -35,6 +33,8 @@ import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.ServerName; import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceStability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.regionserver.wal.ProtobufLogReader; import org.apache.hadoop.hbase.util.CancelableProgressable; import org.apache.hadoop.hbase.util.FSUtils; @@ -53,7 +53,7 @@ import org.apache.hadoop.ipc.RemoteException; @InterfaceAudience.Private @InterfaceStability.Evolving class WALEntryStream implements Closeable { - private static final Log LOG = LogFactory.getLog(WALEntryStream.class); + private static final Logger LOG = LoggerFactory.getLogger(WALEntryStream.class); private Reader reader; private Path currentPath; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/HBaseSaslRpcServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/HBaseSaslRpcServer.java index 7059bd8dc44..b3924350b41 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/HBaseSaslRpcServer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/HBaseSaslRpcServer.java @@ -34,9 +34,9 @@ import javax.security.sasl.Sasl; import javax.security.sasl.SaslException; import javax.security.sasl.SaslServer; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.token.SecretManager; import org.apache.hadoop.security.token.SecretManager.InvalidToken; @@ -49,7 +49,7 @@ import org.apache.hadoop.security.token.TokenIdentifier; @InterfaceAudience.Private public class HBaseSaslRpcServer { - private static final Log LOG = LogFactory.getLog(HBaseSaslRpcServer.class); + private static final Logger LOG = LoggerFactory.getLogger(HBaseSaslRpcServer.class); private final SaslServer saslServer; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java index 0b40b7167f9..7b0e4cdf0d8 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java @@ -32,8 +32,6 @@ import java.util.Set; import java.util.TreeMap; import java.util.TreeSet; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.AuthUtil; import org.apache.hadoop.hbase.Cell; @@ -75,6 +73,8 @@ import org.apache.hadoop.io.Writable; import org.apache.hadoop.io.WritableFactories; import org.apache.hadoop.io.WritableUtils; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Maintains lists of permission grants to users and groups to allow for @@ -119,7 +119,7 @@ public class AccessControlLists { * _acl_ table info: column keys */ public static final char ACL_KEY_DELIMITER = ','; - private static final Log LOG = LogFactory.getLog(AccessControlLists.class); + private static final Logger LOG = LoggerFactory.getLogger(AccessControlLists.class); /** * Stores a new user permission grant in the access control lists table. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java index 0f9d8a5a8f8..0bb61c921f8 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java @@ -39,8 +39,6 @@ import java.util.Set; import java.util.TreeMap; import java.util.TreeSet; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.Cell; @@ -139,6 +137,8 @@ import org.apache.hadoop.hbase.util.SimpleMutableByteRange; import org.apache.hadoop.hbase.wal.WALEdit; import org.apache.hadoop.hbase.zookeeper.ZKWatcher; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Provides basic authorization checks for data access and administrative @@ -180,10 +180,10 @@ public class AccessController implements MasterCoprocessor, RegionCoprocessor, MasterObserver, RegionObserver, RegionServerObserver, EndpointObserver, BulkLoadObserver { // TODO: encapsulate observer functions into separate class/sub-class. 
- private static final Log LOG = LogFactory.getLog(AccessController.class); + private static final Logger LOG = LoggerFactory.getLogger(AccessController.class); - private static final Log AUDITLOG = - LogFactory.getLog("SecurityLogger."+AccessController.class.getName()); + private static final Logger AUDITLOG = + LoggerFactory.getLogger("SecurityLogger."+AccessController.class.getName()); private static final String CHECK_COVERING_PERM = "check_covering_perm"; private static final String TAG_CHECK_PASSED = "tag_check_passed"; private static final byte[] TRUE = Bytes.toBytes(true); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/CoprocessorWhitelistMasterObserver.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/CoprocessorWhitelistMasterObserver.java index a1179b1dc3a..44a4f572043 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/CoprocessorWhitelistMasterObserver.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/CoprocessorWhitelistMasterObserver.java @@ -24,8 +24,6 @@ import java.util.Optional; import java.util.regex.Matcher; import org.apache.commons.io.FilenameUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HBaseInterfaceAudience; @@ -39,6 +37,8 @@ import org.apache.hadoop.hbase.coprocessor.MasterObserver; import org.apache.hadoop.hbase.coprocessor.ObserverContext; import org.apache.hadoop.hbase.util.Bytes; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Master observer for restricting coprocessor assignments. @@ -49,8 +49,8 @@ public class CoprocessorWhitelistMasterObserver implements MasterCoprocessor, Ma public static final String CP_COPROCESSOR_WHITELIST_PATHS_KEY = "hbase.coprocessor.region.whitelist.paths"; - private static final Log LOG = LogFactory - .getLog(CoprocessorWhitelistMasterObserver.class); + private static final Logger LOG = LoggerFactory + .getLogger(CoprocessorWhitelistMasterObserver.class); @Override public Optional getMasterObserver() { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/TableAuthManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/TableAuthManager.java index 2ba4ac5549f..53b10d0a8d9 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/TableAuthManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/TableAuthManager.java @@ -33,8 +33,6 @@ import java.util.Map; import java.util.concurrent.ConcurrentSkipListMap; import java.util.concurrent.atomic.AtomicLong; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.AuthUtil; import org.apache.hadoop.hbase.Cell; @@ -42,11 +40,14 @@ import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.zookeeper.ZKWatcher; import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.exceptions.DeserializationException; +import org.apache.hadoop.hbase.log.HBaseMarkers; import org.apache.hadoop.hbase.security.Superusers; import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.security.UserProvider; import org.apache.hadoop.hbase.util.Bytes; import org.apache.zookeeper.KeeperException; +import org.slf4j.Logger; 
+import org.slf4j.LoggerFactory; /** * Performs authorization checks for a given user's assigned permissions @@ -97,7 +98,7 @@ public class TableAuthManager implements Closeable { } } - private static final Log LOG = LogFactory.getLog(TableAuthManager.class); + private static final Logger LOG = LoggerFactory.getLogger(TableAuthManager.class); /** Cache of global permissions */ private volatile PermissionCache globalCache; @@ -769,7 +770,7 @@ public class TableAuthManager implements Closeable { if (refCount.get(instance) == null || refCount.get(instance) < 1) { String msg = "Something wrong with the TableAuthManager reference counting: " + instance + " whose count is " + refCount.get(instance); - LOG.fatal(msg); + LOG.error(HBaseMarkers.FATAL, msg); instance.close(); managerMap.remove(instance.getZKPermissionWatcher().getWatcher()); instance.getZKPermissionWatcher().getWatcher().abort(msg, null); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/ZKPermissionWatcher.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/ZKPermissionWatcher.java index d45b5b5730a..2437657fbca 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/ZKPermissionWatcher.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/ZKPermissionWatcher.java @@ -18,8 +18,6 @@ package org.apache.hadoop.hbase.security.access; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.DaemonThreadFactory; import org.apache.hadoop.hbase.TableName; @@ -30,6 +28,8 @@ import org.apache.hadoop.hbase.zookeeper.ZKWatcher; import org.apache.hadoop.hbase.zookeeper.ZNodePaths; import org.apache.yetus.audience.InterfaceAudience; import org.apache.zookeeper.KeeperException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.io.Closeable; import java.io.IOException; @@ -53,7 +53,7 @@ import java.util.concurrent.RejectedExecutionException; */ @InterfaceAudience.Private public class ZKPermissionWatcher extends ZKListener implements Closeable { - private static final Log LOG = LogFactory.getLog(ZKPermissionWatcher.class); + private static final Logger LOG = LoggerFactory.getLogger(ZKPermissionWatcher.class); // parent node for permissions lists static final String ACL_NODE = "acl"; private final TableAuthManager authManager; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenSecretManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenSecretManager.java index aa6b1e94b76..de8ea5d3ab0 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenSecretManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenSecretManager.java @@ -25,8 +25,6 @@ import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicLong; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.zookeeper.ZKWatcher; import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.conf.Configuration; @@ -40,6 +38,8 @@ import org.apache.hadoop.io.Text; import org.apache.hadoop.security.token.SecretManager; import org.apache.hadoop.security.token.Token; import org.apache.zookeeper.KeeperException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * 
Manages an internal list of secret keys used to sign new authentication @@ -60,7 +60,7 @@ public class AuthenticationTokenSecretManager static final String NAME_PREFIX = "SecretManager-"; - private static final Log LOG = LogFactory.getLog( + private static final Logger LOG = LoggerFactory.getLogger( AuthenticationTokenSecretManager.class); private long lastKeyUpdate; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/FsDelegationToken.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/FsDelegationToken.java index 3bf4df101a4..389bcc6be9e 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/FsDelegationToken.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/FsDelegationToken.java @@ -20,10 +20,10 @@ package org.apache.hadoop.hbase.security.token; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceStability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.hbase.security.UserProvider; import org.apache.hadoop.security.token.Token; @@ -36,7 +36,7 @@ import org.apache.hadoop.security.token.Token; @InterfaceAudience.Private @InterfaceStability.Evolving public class FsDelegationToken { - private static final Log LOG = LogFactory.getLog(FsDelegationToken.class); + private static final Logger LOG = LoggerFactory.getLogger(FsDelegationToken.class); private final UserProvider userProvider; private final String renewer; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenProvider.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenProvider.java index e3557520137..b137aaa30eb 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenProvider.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenProvider.java @@ -24,8 +24,6 @@ import com.google.protobuf.Service; import java.io.IOException; import java.util.Collections; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.CoprocessorEnvironment; import org.apache.hadoop.hbase.coprocessor.CoreCoprocessor; import org.apache.hadoop.hbase.coprocessor.HasRegionServerServices; @@ -43,6 +41,8 @@ import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod; import org.apache.hadoop.security.token.SecretManager; import org.apache.hadoop.security.token.Token; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Provides a service for obtaining authentication tokens via the @@ -53,7 +53,7 @@ import org.apache.yetus.audience.InterfaceAudience; public class TokenProvider implements AuthenticationProtos.AuthenticationService.Interface, RegionCoprocessor { - private static final Log LOG = LogFactory.getLog(TokenProvider.class); + private static final Logger LOG = LoggerFactory.getLogger(TokenProvider.class); private AuthenticationTokenSecretManager secretManager; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenUtil.java index 3347e1caa62..54617601374 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenUtil.java +++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenUtil.java @@ -24,8 +24,7 @@ import java.security.PrivilegedExceptionAction; import com.google.protobuf.ByteString; import com.google.protobuf.ServiceException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; + import org.apache.hadoop.hbase.zookeeper.ZKWatcher; import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.conf.Configuration; @@ -43,6 +42,8 @@ import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.security.token.Token; import org.apache.zookeeper.KeeperException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Utility methods for obtaining authentication tokens. @@ -50,7 +51,7 @@ import org.apache.zookeeper.KeeperException; @InterfaceAudience.Public public class TokenUtil { // This class is referenced indirectly by User out in common; instances are created by reflection - private static final Log LOG = LogFactory.getLog(TokenUtil.class); + private static final Logger LOG = LoggerFactory.getLogger(TokenUtil.class); /** * Obtain and return an authentication token for the current user. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/ZKSecretWatcher.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/ZKSecretWatcher.java index 96502fd82a1..d31b8a96528 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/ZKSecretWatcher.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/ZKSecretWatcher.java @@ -18,13 +18,12 @@ package org.apache.hadoop.hbase.security.token; +import org.apache.hadoop.hbase.log.HBaseMarkers; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import java.io.IOException; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.zookeeper.ZKListener; import org.apache.hadoop.hbase.zookeeper.ZKWatcher; import org.apache.yetus.audience.InterfaceAudience; @@ -33,6 +32,8 @@ import org.apache.hadoop.hbase.util.Writables; import org.apache.hadoop.hbase.zookeeper.ZKUtil; import org.apache.hadoop.hbase.zookeeper.ZNodePaths; import org.apache.zookeeper.KeeperException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Synchronizes token encryption keys across cluster nodes. 
@@ -41,7 +42,7 @@ import org.apache.zookeeper.KeeperException; public class ZKSecretWatcher extends ZKListener { private static final String DEFAULT_ROOT_NODE = "tokenauth"; private static final String DEFAULT_KEYS_PARENT = "keys"; - private static final Log LOG = LogFactory.getLog(ZKSecretWatcher.class); + private static final Logger LOG = LoggerFactory.getLogger(ZKSecretWatcher.class); private AuthenticationTokenSecretManager secretManager; private String baseKeyZNode; @@ -77,7 +78,7 @@ public class ZKSecretWatcher extends ZKListener { ZKUtil.getChildDataAndWatchForNewChildren(watcher, keysParentZNode); refreshNodes(nodes); } catch (KeeperException ke) { - LOG.fatal("Error reading data from zookeeper", ke); + LOG.error(HBaseMarkers.FATAL, "Error reading data from zookeeper", ke); watcher.abort("Error reading new key znode "+path, ke); } } @@ -110,10 +111,10 @@ public class ZKSecretWatcher extends ZKListener { new AuthenticationKey()); secretManager.addKey(key); } catch (KeeperException ke) { - LOG.fatal("Error reading data from zookeeper", ke); + LOG.error(HBaseMarkers.FATAL, "Error reading data from zookeeper", ke); watcher.abort("Error reading updated key znode "+path, ke); } catch (IOException ioe) { - LOG.fatal("Error reading key writables", ioe); + LOG.error(HBaseMarkers.FATAL, "Error reading key writables", ioe); watcher.abort("Error reading key writables from znode "+path, ioe); } } @@ -128,7 +129,7 @@ public class ZKSecretWatcher extends ZKListener { ZKUtil.getChildDataAndWatchForNewChildren(watcher, keysParentZNode); refreshNodes(nodes); } catch (KeeperException ke) { - LOG.fatal("Error reading data from zookeeper", ke); + LOG.error(HBaseMarkers.FATAL, "Error reading data from zookeeper", ke); watcher.abort("Error reading changed keys from zookeeper", ke); } } @@ -152,8 +153,8 @@ public class ZKSecretWatcher extends ZKListener { data, new AuthenticationKey()); secretManager.addKey(key); } catch (IOException ioe) { - LOG.fatal("Failed reading new secret key for id '" + keyId + - "' from zk", ioe); + LOG.error(HBaseMarkers.FATAL, "Failed reading new secret key for id '" + + keyId + "' from zk", ioe); watcher.abort("Error deserializing key from znode "+path, ioe); } } @@ -170,8 +171,8 @@ public class ZKSecretWatcher extends ZKListener { } catch (KeeperException.NoNodeException nne) { LOG.error("Non-existent znode "+keyZNode+" for key "+key.getKeyId(), nne); } catch (KeeperException ke) { - LOG.fatal("Failed removing znode "+keyZNode+" for key "+key.getKeyId(), - ke); + LOG.error(HBaseMarkers.FATAL, "Failed removing znode "+keyZNode+" for key "+ + key.getKeyId(), ke); watcher.abort("Unhandled zookeeper error removing znode "+keyZNode+ " for key "+key.getKeyId(), ke); } @@ -184,7 +185,7 @@ public class ZKSecretWatcher extends ZKListener { // TODO: is there any point in retrying beyond what ZK client does? 
ZKUtil.createSetData(watcher, keyZNode, keyData); } catch (KeeperException ke) { - LOG.fatal("Unable to synchronize master key "+key.getKeyId()+ + LOG.error(HBaseMarkers.FATAL, "Unable to synchronize master key "+key.getKeyId()+ " to znode "+keyZNode, ke); watcher.abort("Unable to synchronize secret key "+ key.getKeyId()+" in zookeeper", ke); @@ -205,7 +206,7 @@ public class ZKSecretWatcher extends ZKListener { ZKUtil.createSetData(watcher, keyZNode, keyData); } } catch (KeeperException ke) { - LOG.fatal("Unable to update master key "+key.getKeyId()+ + LOG.error(HBaseMarkers.FATAL, "Unable to update master key "+key.getKeyId()+ " in znode "+keyZNode); watcher.abort("Unable to synchronize secret key "+ key.getKeyId()+" in zookeeper", ke); @@ -224,7 +225,7 @@ public class ZKSecretWatcher extends ZKListener { ZKUtil.getChildDataAndWatchForNewChildren(watcher, keysParentZNode); refreshNodes(nodes); } catch (KeeperException ke) { - LOG.fatal("Error reading data from zookeeper", ke); + LOG.error(HBaseMarkers.FATAL, "Error reading data from zookeeper", ke); watcher.abort("Error reading changed keys from zookeeper", ke); } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/DefaultVisibilityLabelServiceImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/DefaultVisibilityLabelServiceImpl.java index 0c6c914baad..fa7c7a74e01 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/DefaultVisibilityLabelServiceImpl.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/DefaultVisibilityLabelServiceImpl.java @@ -40,8 +40,6 @@ import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; import java.util.regex.Pattern; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.AuthUtil; @@ -74,11 +72,13 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.hbase.zookeeper.ZKWatcher; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @InterfaceAudience.Private public class DefaultVisibilityLabelServiceImpl implements VisibilityLabelService { - - private static final Log LOG = LogFactory.getLog(DefaultVisibilityLabelServiceImpl.class); + private static final Logger LOG = + LoggerFactory.getLogger(DefaultVisibilityLabelServiceImpl.class); // "system" label is having an ordinal value 1. private static final int SYSTEM_LABEL_ORDINAL = 1; @@ -507,7 +507,7 @@ public class DefaultVisibilityLabelServiceImpl implements VisibilityLabelService authLabels = (authLabels == null) ? 
new ArrayList<>() : authLabels; authorizations = new Authorizations(authLabels); } catch (Throwable t) { - LOG.error(t); + LOG.error(t.toString(), t); throw new IOException(t); } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/DefinedSetFilterScanLabelGenerator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/DefinedSetFilterScanLabelGenerator.java index 0b7214fb309..77bc2057cdc 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/DefinedSetFilterScanLabelGenerator.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/DefinedSetFilterScanLabelGenerator.java @@ -22,9 +22,9 @@ import java.util.HashSet; import java.util.List; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.security.User; @@ -36,8 +36,8 @@ import org.apache.hadoop.hbase.security.User; */ @InterfaceAudience.Private public class DefinedSetFilterScanLabelGenerator implements ScanLabelGenerator { - - private static final Log LOG = LogFactory.getLog(DefinedSetFilterScanLabelGenerator.class); + private static final Logger LOG = + LoggerFactory.getLogger(DefinedSetFilterScanLabelGenerator.class); private Configuration conf; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/EnforcingScanLabelGenerator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/EnforcingScanLabelGenerator.java index a2a9e04115b..e2bc16b5f02 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/EnforcingScanLabelGenerator.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/EnforcingScanLabelGenerator.java @@ -22,9 +22,9 @@ import java.util.HashSet; import java.util.List; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.security.User; @@ -37,7 +37,7 @@ import org.apache.hadoop.hbase.security.User; @InterfaceAudience.Private public class EnforcingScanLabelGenerator implements ScanLabelGenerator { - private static final Log LOG = LogFactory.getLog(EnforcingScanLabelGenerator.class); + private static final Logger LOG = LoggerFactory.getLogger(EnforcingScanLabelGenerator.class); private Configuration conf; private VisibilityLabelsCache labelsCache; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/FeedUserAuthScanLabelGenerator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/FeedUserAuthScanLabelGenerator.java index cd6ef86c5cf..1c77a4d008d 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/FeedUserAuthScanLabelGenerator.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/FeedUserAuthScanLabelGenerator.java @@ -22,9 +22,9 @@ import java.util.HashSet; import java.util.List; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import 
org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.security.User; @@ -41,7 +41,7 @@ import org.apache.hadoop.hbase.security.User; @InterfaceAudience.Private public class FeedUserAuthScanLabelGenerator implements ScanLabelGenerator { - private static final Log LOG = LogFactory.getLog(FeedUserAuthScanLabelGenerator.class); + private static final Logger LOG = LoggerFactory.getLogger(FeedUserAuthScanLabelGenerator.class); private Configuration conf; private VisibilityLabelsCache labelsCache; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java index c6f81c4c65b..b90f10484ed 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java @@ -38,8 +38,6 @@ import java.util.List; import java.util.Map; import java.util.Optional; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.AuthUtil; import org.apache.hadoop.hbase.Cell; @@ -114,6 +112,8 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.util.StringUtils; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Coprocessor that has both the MasterObserver and RegionObserver implemented that supports in @@ -125,8 +125,8 @@ import org.apache.yetus.audience.InterfaceAudience; public class VisibilityController implements MasterCoprocessor, RegionCoprocessor, VisibilityLabelsService.Interface, MasterObserver, RegionObserver { - private static final Log LOG = LogFactory.getLog(VisibilityController.class); - private static final Log AUDITLOG = LogFactory.getLog("SecurityLogger." + private static final Logger LOG = LoggerFactory.getLogger(VisibilityController.class); + private static final Logger AUDITLOG = LoggerFactory.getLogger("SecurityLogger." 
+ VisibilityController.class.getName()); // flags if we are running on a region of the 'labels' table private boolean labelsRegion = false; @@ -772,7 +772,7 @@ public class VisibilityController implements MasterCoprocessor, RegionCoprocesso LOG.error("User is not having required permissions to add labels", e); setExceptionResults(visLabels.size(), e, response); } catch (IOException e) { - LOG.error(e); + LOG.error(e.toString(), e); setExceptionResults(visLabels.size(), e, response); } } @@ -827,7 +827,7 @@ public class VisibilityController implements MasterCoprocessor, RegionCoprocesso LOG.error("User is not having required permissions to set authorization", e); setExceptionResults(auths.size(), e, response); } catch (IOException e) { - LOG.error(e); + LOG.error(e.toString(), e); setExceptionResults(auths.size(), e, response); } } @@ -951,7 +951,7 @@ public class VisibilityController implements MasterCoprocessor, RegionCoprocesso LOG.error("User is not having required permissions to clear authorization", e); setExceptionResults(auths.size(), e, response); } catch (IOException e) { - LOG.error(e); + LOG.error(e.toString(), e); setExceptionResults(auths.size(), e, response); } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelServiceManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelServiceManager.java index 16eff84310b..74531b92ce7 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelServiceManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelServiceManager.java @@ -19,9 +19,9 @@ package org.apache.hadoop.hbase.security.visibility; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.util.ReflectionUtils; @@ -31,7 +31,7 @@ import org.apache.hadoop.util.ReflectionUtils; @InterfaceAudience.Private public class VisibilityLabelServiceManager { - private static final Log LOG = LogFactory.getLog(VisibilityLabelServiceManager.class); + private static final Logger LOG = LoggerFactory.getLogger(VisibilityLabelServiceManager.class); public static final String VISIBILITY_LABEL_SERVICE_CLASS = "hbase.regionserver.visibility.label.service.class"; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelsCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelsCache.java index 85bc0d51732..438b6169478 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelsCache.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelsCache.java @@ -27,8 +27,6 @@ import java.util.Map; import java.util.Set; import java.util.concurrent.locks.ReentrantReadWriteLock; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.AuthUtil; import org.apache.yetus.audience.InterfaceAudience; @@ -39,6 +37,8 @@ import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.Visibil import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.zookeeper.ZKWatcher; import org.apache.zookeeper.KeeperException; 
+import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Maintains the cache for visibility labels and also uses the zookeeper to update the labels in the @@ -48,7 +48,7 @@ import org.apache.zookeeper.KeeperException; @InterfaceAudience.Private public class VisibilityLabelsCache implements VisibilityLabelOrdinalProvider { - private static final Log LOG = LogFactory.getLog(VisibilityLabelsCache.class); + private static final Logger LOG = LoggerFactory.getLogger(VisibilityLabelsCache.class); private static final List EMPTY_LIST = Collections.emptyList(); private static final Set EMPTY_SET = Collections.emptySet(); private static VisibilityLabelsCache instance; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityNewVersionBehaivorTracker.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityNewVersionBehaivorTracker.java index dc467d6f91d..f6ed72ff749 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityNewVersionBehaivorTracker.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityNewVersionBehaivorTracker.java @@ -30,21 +30,21 @@ import java.util.SortedSet; import java.util.TreeMap; import java.util.TreeSet; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.Tag; import org.apache.hadoop.hbase.regionserver.querymatcher.NewVersionBehaviorTracker; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Similar to MvccSensitiveTracker but tracks the visibility expression also before * deciding if a Cell can be considered deleted */ public class VisibilityNewVersionBehaivorTracker extends NewVersionBehaviorTracker { - - private static final Log LOG = LogFactory.getLog(VisibilityNewVersionBehaivorTracker.class); + private static final Logger LOG = + LoggerFactory.getLogger(VisibilityNewVersionBehaivorTracker.class); public VisibilityNewVersionBehaivorTracker(NavigableSet columns, CellComparator cellComparator, int minVersion, int maxVersion, int resultMaxVersions, @@ -85,6 +85,7 @@ public class VisibilityNewVersionBehaivorTracker extends NewVersionBehaviorTrack mvccCountingMap.put(Long.MAX_VALUE, new TreeSet()); } + @Override protected VisibilityDeleteVersionsNode getDeepCopy() { VisibilityDeleteVersionsNode node = new VisibilityDeleteVersionsNode(ts, mvcc, tagInfo); for (Map.Entry> e : deletesMap.entrySet()) { @@ -96,6 +97,7 @@ public class VisibilityNewVersionBehaivorTracker extends NewVersionBehaviorTrack return node; } + @Override public void addVersionDelete(Cell cell) { SortedMap set = deletesMap.get(cell.getTimestamp()); if (set == null) { @@ -196,6 +198,7 @@ public class VisibilityNewVersionBehaivorTracker extends NewVersionBehaviorTrack return DeleteResult.NOT_DELETED; } + @Override protected void resetInternal() { delFamMap.put(Long.MAX_VALUE, new VisibilityDeleteVersionsNode(Long.MIN_VALUE, Long.MAX_VALUE, new TagInfo())); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityReplicationEndpoint.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityReplicationEndpoint.java index 14507a458f4..cd495ce442a 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityReplicationEndpoint.java +++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityReplicationEndpoint.java @@ -24,8 +24,6 @@ import java.util.UUID; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.PrivateCellUtil; @@ -36,13 +34,14 @@ import org.apache.hadoop.hbase.replication.ReplicationPeerConfig; import org.apache.hadoop.hbase.replication.WALEntryFilter; import org.apache.hadoop.hbase.wal.WAL.Entry; import org.apache.hadoop.hbase.wal.WALEdit; -import org.apache.hadoop.hbase.wal.WALKeyImpl; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @InterfaceAudience.Private public class VisibilityReplicationEndpoint implements ReplicationEndpoint { - private static final Log LOG = LogFactory.getLog(VisibilityReplicationEndpoint.class); + private static final Logger LOG = LoggerFactory.getLogger(VisibilityReplicationEndpoint.class); private final ReplicationEndpoint delegator; private final VisibilityLabelService visibilityLabelsService; @@ -111,7 +110,7 @@ public class VisibilityReplicationEndpoint implements ReplicationEndpoint { newEdit.add(cell); } } - newEntries.add(new Entry(((WALKeyImpl)entry.getKey()), newEdit)); + newEntries.add(new Entry((entry.getKey()), newEdit)); } replicateContext.setEntries(newEntries); return delegator.replicate(replicateContext); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityScanDeleteTracker.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityScanDeleteTracker.java index da0938b22df..6b9ac7449a4 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityScanDeleteTracker.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityScanDeleteTracker.java @@ -23,9 +23,9 @@ import java.util.ArrayList; import java.util.Collections; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.regionserver.querymatcher.ScanDeleteTracker; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; @@ -44,7 +44,7 @@ import org.apache.hadoop.hbase.util.Triple; @InterfaceAudience.Private public class VisibilityScanDeleteTracker extends ScanDeleteTracker { - private static final Log LOG = LogFactory.getLog(VisibilityScanDeleteTracker.class); + private static final Logger LOG = LoggerFactory.getLogger(VisibilityScanDeleteTracker.class); /** * This tag is used for the DELETE cell which has no visibility label. 
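The hunks in this part of the series repeat three substitutions, so a minimal sketch of the resulting idioms (not taken from any one file in the patch, class and method names below are invented for illustration) may help when scanning them. HBaseMarkers is the marker holder that the TableAuthManager and ZKSecretWatcher hunks import; everything else is plain SLF4J API.

    import org.apache.hadoop.hbase.log.HBaseMarkers;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class Slf4jMigrationSketch {
      // before: private static final Log LOG = LogFactory.getLog(Slf4jMigrationSketch.class);
      private static final Logger LOG = LoggerFactory.getLogger(Slf4jMigrationSketch.class);

      void onZkError(Exception e) {
        // before: LOG.error(e);
        // SLF4J's error() wants a String message, so the patch passes e.toString() plus the Throwable
        LOG.error(e.toString(), e);

        // before: LOG.fatal("Error reading data from zookeeper", e);
        // SLF4J has no fatal level, so the patch logs at error() tagged with a FATAL marker
        LOG.error(HBaseMarkers.FATAL, "Error reading data from zookeeper", e);
      }
    }

Keeping a FATAL marker on the downgraded calls, rather than logging plain error(), leaves the former fatal events distinguishable once the logging backend is configured to emit markers.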
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java index 3db8d0ebf14..c177c2b09e7 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java @@ -35,8 +35,6 @@ import java.util.Optional; import java.util.Set; import org.apache.commons.lang3.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.Cell; @@ -65,6 +63,8 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.SimpleMutableByteRange; import org.apache.hadoop.util.ReflectionUtils; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Utility method to support visibility @@ -72,7 +72,7 @@ import org.apache.yetus.audience.InterfaceAudience; @InterfaceAudience.Private public class VisibilityUtils { - private static final Log LOG = LogFactory.getLog(VisibilityUtils.class); + private static final Logger LOG = LoggerFactory.getLogger(VisibilityUtils.class); public static final String VISIBILITY_LABEL_GENERATOR_CLASS = "hbase.regionserver.scan.visibility.label.generator.class"; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ZKVisibilityLabelWatcher.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ZKVisibilityLabelWatcher.java index 5cc244cd69c..d428ff4291e 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ZKVisibilityLabelWatcher.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ZKVisibilityLabelWatcher.java @@ -19,8 +19,6 @@ package org.apache.hadoop.hbase.security.visibility; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.zookeeper.ZKListener; import org.apache.hadoop.hbase.zookeeper.ZKWatcher; import org.apache.yetus.audience.InterfaceAudience; @@ -28,6 +26,8 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.zookeeper.ZKUtil; import org.apache.hadoop.hbase.zookeeper.ZNodePaths; import org.apache.zookeeper.KeeperException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A zk watcher that watches the labels table znode. 
This would create a znode @@ -36,7 +36,7 @@ import org.apache.zookeeper.KeeperException; @InterfaceAudience.Private public class ZKVisibilityLabelWatcher extends ZKListener { - private static final Log LOG = LogFactory.getLog(ZKVisibilityLabelWatcher.class); + private static final Logger LOG = LoggerFactory.getLogger(ZKVisibilityLabelWatcher.class); private static final String VISIBILITY_LABEL_ZK_PATH = "zookeeper.znode.visibility.label.parent"; private static final String DEFAULT_VISIBILITY_LABEL_NODE = "visibility/labels"; private static final String VISIBILITY_USER_AUTHS_ZK_PATH = diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java index e08d547d6b9..99690de2031 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java @@ -34,8 +34,6 @@ import java.util.Set; import java.util.TreeMap; import java.util.concurrent.ThreadPoolExecutor; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -66,7 +64,8 @@ import org.apache.hadoop.hbase.util.ModifyRegionUtils; import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.io.IOUtils; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimap; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription; @@ -115,7 +114,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.Snapshot */ @InterfaceAudience.Private public class RestoreSnapshotHelper { - private static final Log LOG = LogFactory.getLog(RestoreSnapshotHelper.class); + private static final Logger LOG = LoggerFactory.getLogger(RestoreSnapshotHelper.class); private final Map regionsMap = new TreeMap<>(Bytes.BYTES_COMPARATOR); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotDescriptionUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotDescriptionUtils.java index 61a4a85ed07..d5cab63300b 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotDescriptionUtils.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotDescriptionUtils.java @@ -21,9 +21,6 @@ import java.io.IOException; import java.security.PrivilegedExceptionAction; import java.util.Collections; -import org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; @@ -32,18 +29,22 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.TableName; -import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; +import org.apache.hadoop.hbase.security.User; import 
org.apache.hadoop.hbase.security.access.AccessControlLists; import org.apache.hadoop.hbase.security.access.ShadedAccessControlUtil; import org.apache.hadoop.hbase.security.access.TablePermission; -import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; -import org.apache.hadoop.hbase.security.User; -import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.FSUtils; +import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimap; +import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; +import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription; /** * Utility class to help manage {@link SnapshotDescription SnapshotDesriptions}. @@ -97,7 +98,7 @@ public final class SnapshotDescriptionUtils { } } - private static final Log LOG = LogFactory.getLog(SnapshotDescriptionUtils.class); + private static final Logger LOG = LoggerFactory.getLogger(SnapshotDescriptionUtils.class); /** * Version of the fs layout for a snapshot. Future snapshots may have different file layouts, * which we may need to read in differently. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java index c76155c6de9..7d7e526b6d9 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java @@ -34,8 +34,6 @@ import java.util.concurrent.atomic.AtomicLong; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.Option; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -49,7 +47,8 @@ import org.apache.hadoop.hbase.util.AbstractHBaseTool; import org.apache.hadoop.hbase.util.FSUtils; import org.apache.hadoop.util.StringUtils; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos; import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest; @@ -65,7 +64,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.Snapshot */ @InterfaceAudience.Public public final class SnapshotInfo extends AbstractHBaseTool { - private static final Log LOG = LogFactory.getLog(SnapshotInfo.class); + private static final Logger LOG = LoggerFactory.getLogger(SnapshotInfo.class); static final class Options { static final Option SNAPSHOT = new Option(null, "snapshot", true, "Snapshot to examine."); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifest.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifest.java index 1a7c7f017aa..b3345854540 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifest.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifest.java @@ -28,8 +28,6 @@ import java.util.Map; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; -import 
org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; @@ -51,7 +49,8 @@ import org.apache.hadoop.hbase.util.FSTableDescriptors; import org.apache.hadoop.hbase.util.FSUtils; import org.apache.hadoop.hbase.util.Threads; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream; import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException; @@ -69,7 +68,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.Snapshot */ @InterfaceAudience.Private public final class SnapshotManifest { - private static final Log LOG = LogFactory.getLog(SnapshotManifest.class); + private static final Logger LOG = LoggerFactory.getLogger(SnapshotManifest.class); public static final String SNAPSHOT_MANIFEST_SIZE_LIMIT_CONF_KEY = "snapshot.manifest.size.limit"; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV1.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV1.java index 61cbbd11723..7dfeab39a4b 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV1.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV1.java @@ -28,8 +28,6 @@ import java.util.concurrent.ExecutionException; import java.util.concurrent.Executor; import java.util.concurrent.ExecutorCompletionService; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -40,7 +38,8 @@ import org.apache.hadoop.hbase.regionserver.StoreFileInfo; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.FSUtils; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription; @@ -57,7 +56,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.Snapshot */ @InterfaceAudience.Private public final class SnapshotManifestV1 { - private static final Log LOG = LogFactory.getLog(SnapshotManifestV1.class); + private static final Logger LOG = LoggerFactory.getLogger(SnapshotManifestV1.class); public static final int DESCRIPTOR_VERSION = 0; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV2.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV2.java index 561eb776385..5b7152aad50 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV2.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV2.java @@ -27,8 +27,6 @@ import java.util.concurrent.ExecutionException; import java.util.concurrent.Executor; import java.util.concurrent.ExecutorCompletionService; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import 
org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; @@ -40,7 +38,8 @@ import org.apache.hadoop.hbase.client.RegionInfo; import org.apache.hadoop.hbase.regionserver.StoreFileInfo; import org.apache.hadoop.hbase.util.FSUtils; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream; import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException; import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations; @@ -58,7 +57,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.Snapshot */ @InterfaceAudience.Private public final class SnapshotManifestV2 { - private static final Log LOG = LogFactory.getLog(SnapshotManifestV2.class); + private static final Logger LOG = LoggerFactory.getLogger(SnapshotManifestV2.class); public static final int DESCRIPTOR_VERSION = 2; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotReferenceUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotReferenceUtil.java index 0cca62fa338..b157d01e191 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotReferenceUtil.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotReferenceUtil.java @@ -29,8 +29,6 @@ import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorCompletionService; import java.util.concurrent.ExecutorService; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -42,7 +40,8 @@ import org.apache.hadoop.hbase.mob.MobUtils; import org.apache.hadoop.hbase.regionserver.StoreFileInfo; import org.apache.hadoop.hbase.util.HFileArchiveUtil; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription; import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest; @@ -52,7 +51,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.Snapshot */ @InterfaceAudience.Private public final class SnapshotReferenceUtil { - private static final Log LOG = LogFactory.getLog(SnapshotReferenceUtil.class); + private static final Logger LOG = LoggerFactory.getLogger(SnapshotReferenceUtil.class); public interface StoreFileVisitor { void storeFile(final RegionInfo regionInfo, final String familyName, diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java index e942a020e05..969a7579bae 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java @@ -49,8 +49,6 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; import org.apache.commons.lang3.time.StopWatch; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.AuthUtil; import org.apache.hadoop.hbase.ChoreService; @@ -98,7 +96,8 @@ import org.apache.zookeeper.KeeperException; import 
org.apache.zookeeper.ZooKeeper; import org.apache.zookeeper.client.ConnectStringParser; import org.apache.zookeeper.data.Stat; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; /** @@ -540,10 +539,10 @@ public final class Canary implements Tool { LOG.debug("The targeted table was disabled. Assuming success."); } catch (DoNotRetryIOException dnrioe) { sink.publishReadFailure(tableName.getNameAsString(), serverName); - LOG.error(dnrioe); + LOG.error(dnrioe.toString(), dnrioe); } catch (IOException e) { sink.publishReadFailure(tableName.getNameAsString(), serverName); - LOG.error(e); + LOG.error(e.toString(), e); } finally { if (table != null) { try { @@ -571,7 +570,7 @@ public final class Canary implements Tool { private static final long DEFAULT_TIMEOUT = 600000; // 10 mins private static final int MAX_THREADS_NUM = 16; // #threads to contact regions - private static final Log LOG = LogFactory.getLog(Canary.class); + private static final Logger LOG = LoggerFactory.getLogger(Canary.class); public static final TableName DEFAULT_WRITE_TABLE_NAME = TableName.valueOf( NamespaceDescriptor.SYSTEM_NAMESPACE_NAME_STR, "canary"); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/LoadIncrementalHFiles.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/LoadIncrementalHFiles.java index c457e224dac..b5eea9cd0cb 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/LoadIncrementalHFiles.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/LoadIncrementalHFiles.java @@ -50,8 +50,6 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; import org.apache.commons.lang3.mutable.MutableInt; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.fs.FileStatus; @@ -63,6 +61,8 @@ import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.TableNotFoundException; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.ClientServiceCallable; import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor; @@ -109,7 +109,7 @@ import org.apache.hadoop.util.ToolRunner; @InterfaceAudience.Public public class LoadIncrementalHFiles extends Configured implements Tool { - private static final Log LOG = LogFactory.getLog(LoadIncrementalHFiles.class); + private static final Logger LOG = LoggerFactory.getLogger(LoadIncrementalHFiles.class); public static final String NAME = "completebulkload"; static final String RETRY_ON_IO_EXCEPTION = "hbase.bulkload.retries.retryOnIOException"; @@ -328,7 +328,7 @@ public class LoadIncrementalHFiles extends Configured implements Tool { if (queue.isEmpty()) { LOG.warn( "Bulk load operation did not find any files to load in " + "directory " + hfofDir != null - ? hfofDir.toUri() + ? hfofDir.toUri().toString() : "" + ". 
Does it contain files in " + "subdirectories that correspond to column family names?"); return Collections.emptyMap(); @@ -877,7 +877,7 @@ public class LoadIncrementalHFiles extends Configured implements Tool { for (LoadQueueItem q : queue) { err.append(" ").append(q.getFilePath()).append('\n'); } - LOG.error(err); + LOG.error(err.toString()); } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/WriteSinkCoprocessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/WriteSinkCoprocessor.java index 60fd22d8523..738ffc28c16 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/WriteSinkCoprocessor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/WriteSinkCoprocessor.java @@ -18,8 +18,6 @@ */ package org.apache.hadoop.hbase.tool; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.client.Mutation; import org.apache.hadoop.hbase.coprocessor.ObserverContext; @@ -28,6 +26,8 @@ import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment; import org.apache.hadoop.hbase.coprocessor.RegionObserver; import org.apache.hadoop.hbase.regionserver.MiniBatchOperationInProgress; import org.apache.hadoop.hbase.regionserver.OperationStatus; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.io.IOException; import java.util.Optional; @@ -61,7 +61,7 @@ import java.util.concurrent.atomic.AtomicLong; *

*/ public class WriteSinkCoprocessor implements RegionCoprocessor, RegionObserver { - private static final Log LOG = LogFactory.getLog(WriteSinkCoprocessor.class); + private static final Logger LOG = LoggerFactory.getLogger(WriteSinkCoprocessor.class); private final AtomicLong ops = new AtomicLong(); @Override diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BloomFilterFactory.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BloomFilterFactory.java index b4851bf1055..89ff5b7222e 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BloomFilterFactory.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BloomFilterFactory.java @@ -20,8 +20,6 @@ package org.apache.hadoop.hbase.util; import java.io.DataInput; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.io.hfile.CacheConfig; @@ -31,6 +29,8 @@ import org.apache.hadoop.hbase.io.hfile.CompoundBloomFilterWriter; import org.apache.hadoop.hbase.io.hfile.HFile; import org.apache.hadoop.hbase.regionserver.BloomType; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Handles Bloom filter initialization based on configuration and serialized metadata in the reader @@ -39,8 +39,8 @@ import org.apache.yetus.audience.InterfaceAudience; @InterfaceAudience.Private public final class BloomFilterFactory { - private static final Log LOG = - LogFactory.getLog(BloomFilterFactory.class.getName()); + private static final Logger LOG = + LoggerFactory.getLogger(BloomFilterFactory.class.getName()); /** This class should not be instantiated. 
*/ private BloomFilterFactory() {} diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/CompressionTest.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/CompressionTest.java index dbc7afa7440..b6af8a55073 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/CompressionTest.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/CompressionTest.java @@ -22,11 +22,11 @@ import java.io.IOException; import java.util.Locale; import org.apache.commons.lang3.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.CellComparator; import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceStability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -52,7 +52,7 @@ import org.apache.hadoop.io.compress.Compressor; @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS) @InterfaceStability.Evolving public class CompressionTest { - private static final Log LOG = LogFactory.getLog(CompressionTest.class); + private static final Logger LOG = LoggerFactory.getLogger(CompressionTest.class); public static boolean testCompression(String codec) { codec = codec.toLowerCase(Locale.ROOT); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ConnectionCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ConnectionCache.java index dadb615498c..7b9f021313a 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ConnectionCache.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ConnectionCache.java @@ -23,13 +23,14 @@ import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.locks.Lock; -import org.apache.commons.logging.Log; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.ChoreService; import org.apache.hadoop.hbase.ScheduledChore; import org.apache.hadoop.hbase.Stoppable; import org.apache.hadoop.hbase.TableName; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; @@ -38,7 +39,6 @@ import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.security.UserProvider; import org.apache.hadoop.security.UserGroupInformation; -import org.apache.commons.logging.LogFactory; /** * A utility to store user specific HConnections in memory. 
@@ -48,7 +48,7 @@ import org.apache.commons.logging.LogFactory; */ @InterfaceAudience.Private public class ConnectionCache { - private static final Log LOG = LogFactory.getLog(ConnectionCache.class); + private static final Logger LOG = LoggerFactory.getLogger(ConnectionCache.class); private final Map connections = new ConcurrentHashMap<>(); private final KeyLocker locker = new KeyLocker<>(); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/DirectMemoryUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/DirectMemoryUtils.java index 2cc4f44e2c7..6c6a09d92f4 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/DirectMemoryUtils.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/DirectMemoryUtils.java @@ -32,11 +32,10 @@ import javax.management.MBeanServer; import javax.management.MalformedObjectNameException; import javax.management.ObjectName; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceStability; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; /** @@ -45,7 +44,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; @InterfaceAudience.Private @InterfaceStability.Evolving public class DirectMemoryUtils { - private static final Log LOG = LogFactory.getLog(DirectMemoryUtils.class); + private static final Logger LOG = LoggerFactory.getLogger(DirectMemoryUtils.class); private static final String MEMORY_USED = "MemoryUsed"; private static final MBeanServer BEAN_SERVER; private static final ObjectName NIO_DIRECT_POOL; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/EncryptionTest.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/EncryptionTest.java index e6b8c0aa34c..2687d3b033a 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/EncryptionTest.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/EncryptionTest.java @@ -24,10 +24,10 @@ import java.io.IOException; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.io.crypto.DefaultCipherProvider; @@ -37,7 +37,7 @@ import org.apache.hadoop.hbase.security.EncryptionUtil; @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS) public class EncryptionTest { - private static final Log LOG = LogFactory.getLog(EncryptionTest.class); + private static final Logger LOG = LoggerFactory.getLogger(EncryptionTest.class); static final Map keyProviderResults = new ConcurrentHashMap<>(); static final Map cipherProviderResults = new ConcurrentHashMap<>(); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSHDFSUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSHDFSUtils.java index 8e13f4015d3..cdc0aad3b55 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSHDFSUtils.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSHDFSUtils.java @@ -24,22 +24,22 @@ import java.io.InterruptedIOException; import java.lang.reflect.Method; 
import java.net.InetSocketAddress; import java.net.URI; +import java.util.Collection; import java.util.HashSet; import java.util.Map; import java.util.Set; -import java.util.Collection; -import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.yetus.audience.InterfaceAudience; -import org.apache.yetus.audience.InterfaceStability; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hdfs.DistributedFileSystem; import org.apache.hadoop.hdfs.server.namenode.LeaseExpiredException; +import org.apache.yetus.audience.InterfaceAudience; +import org.apache.yetus.audience.InterfaceStability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; /** * Implementation for hdfs @@ -47,7 +47,7 @@ import org.apache.hadoop.hdfs.server.namenode.LeaseExpiredException; @InterfaceAudience.Private @InterfaceStability.Evolving public class FSHDFSUtils extends FSUtils { - private static final Log LOG = LogFactory.getLog(FSHDFSUtils.class); + private static final Logger LOG = LoggerFactory.getLogger(FSHDFSUtils.class); private static Class dfsUtilClazz; private static Method getNNAddressesMethod; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSMapRUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSMapRUtils.java index bb7b1f3a29e..4207f391db8 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSMapRUtils.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSMapRUtils.java @@ -21,19 +21,19 @@ package org.apache.hadoop.hbase.util; import java.io.IOException; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.permission.FsPermission; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; /** * MapR implementation. 
*/ @InterfaceAudience.Private public class FSMapRUtils extends FSUtils { - private static final Log LOG = LogFactory.getLog(FSMapRUtils.class); + private static final Logger LOG = LoggerFactory.getLogger(FSMapRUtils.class); public void recoverFileLease(final FileSystem fs, final Path p, Configuration conf, CancelableProgressable reporter) throws IOException { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSRegionScanner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSRegionScanner.java index 66ac3956d68..f258e6cd931 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSRegionScanner.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSRegionScanner.java @@ -23,9 +23,10 @@ import java.util.HashMap; import java.util.Map; import java.util.concurrent.atomic.AtomicInteger; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; + import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.BlockLocation; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -39,7 +40,7 @@ import org.apache.hadoop.hbase.util.FSUtils; */ @InterfaceAudience.Private class FSRegionScanner implements Runnable { - static private final Log LOG = LogFactory.getLog(FSRegionScanner.class); + static private final Logger LOG = LoggerFactory.getLogger(FSRegionScanner.class); private Path regionPath; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java index e1bc189e3f3..5627e9a0450 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java @@ -30,8 +30,6 @@ import java.util.regex.Pattern; import edu.umd.cs.findbugs.annotations.Nullable; import org.apache.commons.lang3.NotImplementedException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; @@ -40,6 +38,8 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.PathFilter; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder; import org.apache.hadoop.hbase.client.TableDescriptor; import org.apache.hadoop.hbase.client.TableDescriptorBuilder; @@ -73,7 +73,7 @@ import org.apache.hadoop.hbase.TableName; */ @InterfaceAudience.Private public class FSTableDescriptors implements TableDescriptors { - private static final Log LOG = LogFactory.getLog(FSTableDescriptors.class); + private static final Logger LOG = LoggerFactory.getLogger(FSTableDescriptors.class); private final FileSystem fs; private final Path rootdir; private final boolean fsreadonly; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java index 81fcaf201f9..1620fd8bee4 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java @@ -55,8 +55,6 @@ import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; import 
java.util.regex.Pattern; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.BlockLocation; import org.apache.hadoop.fs.FSDataInputStream; @@ -74,6 +72,8 @@ import org.apache.hadoop.hbase.HDFSBlocksDistribution; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.TableName; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.fs.HFileSystem; import org.apache.hadoop.hbase.io.HFileLink; @@ -100,7 +100,7 @@ import org.apache.hadoop.util.StringUtils; */ @InterfaceAudience.Private public abstract class FSUtils extends CommonFSUtils { - private static final Log LOG = LogFactory.getLog(FSUtils.class); + private static final Logger LOG = LoggerFactory.getLogger(FSUtils.class); private static final String THREAD_POOLSIZE = "hbase.client.localityCheck.threadPoolSize"; private static final int DEFAULT_THREAD_POOLSIZE = 2; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSVisitor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSVisitor.java index 353f1c7a253..24cd2234717 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSVisitor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSVisitor.java @@ -21,9 +21,9 @@ package org.apache.hadoop.hbase.util; import java.io.IOException; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -34,7 +34,7 @@ import org.apache.hadoop.fs.PathFilter; */ @InterfaceAudience.Private public final class FSVisitor { - private static final Log LOG = LogFactory.getLog(FSVisitor.class); + private static final Logger LOG = LoggerFactory.getLogger(FSVisitor.class); public interface StoreFileVisitor { void storeFile(final String region, final String family, final String hfileName) diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java index aab3b36e678..5e7d7288a1e 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java @@ -38,6 +38,7 @@ import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Map.Entry; +import java.util.Objects; import java.util.Optional; import java.util.Set; import java.util.SortedMap; @@ -61,8 +62,6 @@ import java.util.concurrent.atomic.AtomicInteger; import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.RandomStringUtils; import org.apache.commons.lang3.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.fs.FSDataOutputStream; @@ -110,6 +109,7 @@ import org.apache.hadoop.hbase.io.FileLink; import org.apache.hadoop.hbase.io.HFileLink; import org.apache.hadoop.hbase.io.hfile.CacheConfig; import org.apache.hadoop.hbase.io.hfile.HFile; +import org.apache.hadoop.hbase.log.HBaseMarkers; import 
org.apache.hadoop.hbase.master.MasterFileSystem; import org.apache.hadoop.hbase.master.RegionState; import org.apache.hadoop.hbase.regionserver.HRegion; @@ -141,7 +141,8 @@ import org.apache.hadoop.util.ToolRunner; import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceStability; import org.apache.zookeeper.KeeperException; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.base.Joiner; import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableList; @@ -223,7 +224,7 @@ public class HBaseFsck extends Configured implements Closeable { /********************** * Internal resources **********************/ - private static final Log LOG = LogFactory.getLog(HBaseFsck.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(HBaseFsck.class.getName()); private ClusterStatus status; private ClusterConnection connection; private Admin admin; @@ -805,7 +806,7 @@ public class HBaseFsck extends Configured implements Closeable { cleanupHbckZnode(); unlockHbck(); } catch (Exception io) { - LOG.warn(io); + LOG.warn(io.toString(), io); } finally { if (zkw != null) { zkw.close(); @@ -907,11 +908,11 @@ public class HBaseFsck extends Configured implements Closeable { errors.reportError(ERROR_CODE.BOUNDARIES_ERROR, "Found issues with regions boundaries", tablesInfo.get(regionInfo.getTable())); LOG.warn("Region's boundaries not aligned between stores and META for:"); - LOG.warn(currentRegionBoundariesInformation); + LOG.warn(Objects.toString(currentRegionBoundariesInformation)); } } } catch (IOException e) { - LOG.error(e); + LOG.error(e.toString(), e); } } @@ -1597,8 +1598,8 @@ public class HBaseFsck extends Configured implements Closeable { // populate meta List puts = generatePuts(tablesInfo); if (puts == null) { - LOG.fatal("Problem encountered when creating new hbase:meta entries. " + - "You may need to restore the previously sidelined hbase:meta"); + LOG.error(HBaseMarkers.FATAL, "Problem encountered when creating new hbase:meta " + + "entries. You may need to restore the previously sidelined hbase:meta"); return false; } meta.batchMutate(puts.toArray(new Put[puts.size()]), HConstants.NO_NONCE, HConstants.NO_NONCE); @@ -1791,9 +1792,9 @@ public class HBaseFsck extends Configured implements Closeable { try { sidelineTable(fs, TableName.META_TABLE_NAME, hbaseDir, backupDir); } catch (IOException e) { - LOG.fatal("... failed to sideline meta. Currently in inconsistent state. To restore " - + "try to rename hbase:meta in " + backupDir.getName() + " to " - + hbaseDir.getName() + ".", e); + LOG.error(HBaseMarkers.FATAL, "... failed to sideline meta. Currently in " + + "inconsistent state. To restore try to rename hbase:meta in " + + backupDir.getName() + " to " + hbaseDir.getName() + ".", e); throw e; // throw original exception } return backupDir; @@ -1882,7 +1883,7 @@ public class HBaseFsck extends Configured implements Closeable { * Record the location of the hbase:meta region as found in ZooKeeper. 
*/ private boolean recordMetaRegion() throws IOException { - RegionLocations rl = ((ClusterConnection)connection).locateRegion(TableName.META_TABLE_NAME, + RegionLocations rl = connection.locateRegion(TableName.META_TABLE_NAME, HConstants.EMPTY_START_ROW, false, false); if (rl == null) { errors.reportError(ERROR_CODE.NULL_META_REGION, diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsckRepair.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsckRepair.java index afb6c5b0e1c..b8811c7ce9a 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsckRepair.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsckRepair.java @@ -24,8 +24,6 @@ import java.util.EnumSet; import java.util.List; import java.util.Random; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.ClusterStatus.Option; @@ -46,6 +44,8 @@ import org.apache.hadoop.hbase.master.ServerManager; import org.apache.hadoop.hbase.regionserver.HRegion; import org.apache.yetus.audience.InterfaceAudience; import org.apache.zookeeper.KeeperException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This class contains helper methods that repair parts of hbase's filesystem @@ -53,7 +53,7 @@ import org.apache.zookeeper.KeeperException; */ @InterfaceAudience.Private public class HBaseFsckRepair { - private static final Log LOG = LogFactory.getLog(HBaseFsckRepair.class); + private static final Logger LOG = LoggerFactory.getLogger(HBaseFsckRepair.class); /** * Fix multiple assignment by doing silent closes on each RS hosting the region diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/JVMClusterUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/JVMClusterUtil.java index b88c0e63f63..00410af2fa7 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/JVMClusterUtil.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/JVMClusterUtil.java @@ -24,9 +24,9 @@ import java.lang.reflect.Constructor; import java.lang.reflect.InvocationTargetException; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.CoordinatedStateManager; import org.apache.hadoop.hbase.master.HMaster; @@ -37,7 +37,7 @@ import org.apache.hadoop.hbase.regionserver.HRegionServer; */ @InterfaceAudience.Private public class JVMClusterUtil { - private static final Log LOG = LogFactory.getLog(JVMClusterUtil.class); + private static final Logger LOG = LoggerFactory.getLogger(JVMClusterUtil.class); /** * Datastructure to hold RegionServer Thread and RegionServer instance diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/JvmPauseMonitor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/JvmPauseMonitor.java index 765edf93c49..202b9fb2d0e 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/JvmPauseMonitor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/JvmPauseMonitor.java @@ -24,9 +24,9 @@ import java.util.Map; import java.util.Set; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import 
org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.metrics.JvmPauseMonitorSource; import org.apache.hadoop.conf.Configuration; @@ -51,7 +51,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; */ @InterfaceAudience.Private public class JvmPauseMonitor { - private static final Log LOG = LogFactory.getLog(JvmPauseMonitor.class); + private static final Logger LOG = LoggerFactory.getLogger(JvmPauseMonitor.class); /** The target sleep time */ private static final long SLEEP_INTERVAL_MS = 500; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ModifyRegionUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ModifyRegionUtils.java index fe33c24d99f..1c860b42e98 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ModifyRegionUtils.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ModifyRegionUtils.java @@ -32,8 +32,6 @@ import java.util.concurrent.ThreadFactory; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HConstants; @@ -42,13 +40,15 @@ import org.apache.hadoop.hbase.client.RegionInfoBuilder; import org.apache.hadoop.hbase.client.TableDescriptor; import org.apache.hadoop.hbase.regionserver.HRegion; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Utility methods for interacting with the regions. */ @InterfaceAudience.Private public abstract class ModifyRegionUtils { - private static final Log LOG = LogFactory.getLog(ModifyRegionUtils.class); + private static final Logger LOG = LoggerFactory.getLogger(ModifyRegionUtils.class); private ModifyRegionUtils() { } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/MultiHConnection.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/MultiHConnection.java index 1f19848317d..58057932bd7 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/MultiHConnection.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/MultiHConnection.java @@ -27,12 +27,12 @@ import java.util.concurrent.ThreadLocalRandom; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.TableName; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.client.ClusterConnection; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; @@ -46,7 +46,7 @@ import org.apache.hadoop.hbase.client.coprocessor.Batch; */ @InterfaceAudience.Private public class MultiHConnection { - private static final Log LOG = LogFactory.getLog(MultiHConnection.class); + private static final Logger LOG = LoggerFactory.getLogger(MultiHConnection.class); private Connection[] connections; private final Object connectionsLock = new Object(); private final int noOfConnections; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionMover.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionMover.java index 2dc1fe9c93d..711507b0f67 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionMover.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionMover.java @@ -43,8 +43,6 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import org.apache.commons.cli.CommandLine; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.ClusterStatus.Option; import org.apache.hadoop.hbase.HBaseConfiguration; @@ -64,6 +62,8 @@ import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter; import org.apache.hadoop.hbase.zookeeper.MetaTableLocator; import org.apache.hadoop.hbase.zookeeper.ZKWatcher; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Tool for loading/unloading regions to/from given regionserver This tool can be run from Command @@ -82,7 +82,7 @@ public class RegionMover extends AbstractHBaseTool { public static final int DEFAULT_MOVE_RETRIES_MAX = 5; public static final int DEFAULT_MOVE_WAIT_MAX = 60; public static final int DEFAULT_SERVERSTART_WAIT_MAX = 180; - static final Log LOG = LogFactory.getLog(RegionMover.class); + static final Logger LOG = LoggerFactory.getLogger(RegionMover.class); private RegionMoverBuilder rmbuilder; private boolean ack = true; private int maxthreads = 1; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionSplitCalculator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionSplitCalculator.java index 91be6e84989..e41882fb375 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionSplitCalculator.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionSplitCalculator.java @@ -26,9 +26,9 @@ import java.util.Map.Entry; import java.util.TreeMap; import java.util.TreeSet; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.util.Bytes.ByteArrayComparator; import org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap; @@ -53,7 +53,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.TreeMultimap; */ @InterfaceAudience.Private public class RegionSplitCalculator { - private static final Log LOG = LogFactory.getLog(RegionSplitCalculator.class); + private static final Logger LOG = LoggerFactory.getLogger(RegionSplitCalculator.class); private final Comparator rangeCmp; /** diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionSplitter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionSplitter.java index 06bccd13abd..5f480a5eee5 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionSplitter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionSplitter.java @@ -37,8 +37,6 @@ import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; import org.apache.commons.lang3.ArrayUtils; import org.apache.commons.lang3.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; @@ -56,6 +54,8 @@ import 
org.apache.hadoop.hbase.MetaTableAccessor; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.TableName; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.ClusterConnection; import org.apache.hadoop.hbase.client.Connection; @@ -145,7 +145,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; */ @InterfaceAudience.Private public class RegionSplitter { - private static final Log LOG = LogFactory.getLog(RegionSplitter.class); + private static final Logger LOG = LoggerFactory.getLogger(RegionSplitter.class); /** * A generic interface for the RegionSplitter code to use for all it's @@ -434,7 +434,7 @@ public class RegionSplitter { * Alternative getCurrentNrHRS which is no longer available. * @param connection * @return Rough count of regionservers out on cluster. - * @throws IOException + * @throws IOException */ private static int getRegionServerCount(final Connection connection) throws IOException { try (Admin admin = connection.getAdmin()) { @@ -729,7 +729,7 @@ public class RegionSplitter { } } catch (NoServerForRegionException nsfre) { // NSFRE will occur if the old hbase:meta entry has no server assigned - LOG.info(nsfre); + LOG.info(nsfre.toString(), nsfre); logicalSplitting.add(region); continue; } @@ -785,7 +785,7 @@ public class RegionSplitter { * @param conf * @param tableName * @return A Pair where first item is table dir and second is the split file. - * @throws IOException + * @throws IOException */ private static Pair getTableDirAndSplitFile(final Configuration conf, final TableName tableName) @@ -803,7 +803,7 @@ public class RegionSplitter { getTableDirAndSplitFile(connection.getConfiguration(), tableName); Path tableDir = tableDirAndSplitFile.getFirst(); Path splitFile = tableDirAndSplitFile.getSecond(); - + FileSystem fs = tableDir.getFileSystem(connection.getConfiguration()); // Using strings because (new byte[]{0}).equals(new byte[]{0}) == false @@ -949,6 +949,7 @@ public class RegionSplitter { this.rowComparisonLength = lastRow.length(); } + @Override public byte[] split(byte[] start, byte[] end) { BigInteger s = convertToBigInteger(start); BigInteger e = convertToBigInteger(end); @@ -956,6 +957,7 @@ public class RegionSplitter { return convertToByte(split2(s, e)); } + @Override public byte[][] split(int n) { Preconditions.checkArgument(lastRowInt.compareTo(firstRowInt) > 0, "last row (%s) is configured less than first row (%s)", lastRow, @@ -1009,19 +1011,23 @@ public class RegionSplitter { } } + @Override public byte[] firstRow() { return convertToByte(firstRowInt); } + @Override public byte[] lastRow() { return convertToByte(lastRowInt); } + @Override public void setFirstRow(String userInput) { firstRow = userInput; firstRowInt = new BigInteger(firstRow, radix); } + @Override public void setLastRow(String userInput) { lastRow = userInput; lastRowInt = new BigInteger(lastRow, radix); @@ -1029,14 +1035,17 @@ public class RegionSplitter { rowComparisonLength = lastRow.length(); } + @Override public byte[] strToRow(String in) { return convertToByte(new BigInteger(in, radix)); } + @Override public String rowToStr(byte[] row) { return Bytes.toStringBinary(row); } + @Override public String separator() { return " "; } @@ -1130,6 +1139,7 @@ public class RegionSplitter { byte[] firstRowBytes = ArrayUtils.EMPTY_BYTE_ARRAY; byte[] lastRowBytes = new byte[] {xFF, xFF, xFF, xFF, xFF, xFF, xFF, xFF}; + 
@Override public byte[] split(byte[] start, byte[] end) { return Bytes.split(start, end, 1)[1]; } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ServerCommandLine.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ServerCommandLine.java index 4175526ec6c..83ec5ffc63b 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ServerCommandLine.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ServerCommandLine.java @@ -26,9 +26,9 @@ import java.util.Locale; import java.util.Map.Entry; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.hbase.HBaseConfiguration; @@ -40,7 +40,7 @@ import org.apache.hadoop.util.ToolRunner; */ @InterfaceAudience.Private public abstract class ServerCommandLine extends Configured implements Tool { - private static final Log LOG = LogFactory.getLog(ServerCommandLine.class); + private static final Logger LOG = LoggerFactory.getLogger(ServerCommandLine.class); @SuppressWarnings("serial") private static final Set DEFAULT_SKIP_WORDS = new HashSet() { { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ServerRegionReplicaUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ServerRegionReplicaUtil.java index 9b61b8b4106..fe514d8c1c2 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ServerRegionReplicaUtil.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ServerRegionReplicaUtil.java @@ -20,8 +20,6 @@ package org.apache.hadoop.hbase.util; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -37,13 +35,15 @@ import org.apache.hadoop.hbase.regionserver.StoreFileInfo; import org.apache.hadoop.hbase.replication.ReplicationPeerConfig; import org.apache.hadoop.hbase.replication.regionserver.RegionReplicaReplicationEndpoint; import org.apache.hadoop.hbase.zookeeper.ZKConfig; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Similar to {@link RegionReplicaUtil} but for the server side */ public class ServerRegionReplicaUtil extends RegionReplicaUtil { - private static final Log LOG = LogFactory.getLog(ServerRegionReplicaUtil.class); + private static final Logger LOG = LoggerFactory.getLogger(ServerRegionReplicaUtil.class); /** * Whether asynchronous WAL replication to the secondary region replicas is enabled or not. 
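
The hunks above all apply the same mechanical substitution, but call sites that passed a bare exception or an arbitrary object need more care: commons-logging's Log.error(Object) accepted any argument, while SLF4J's Logger only takes a String message, optionally followed by a Throwable or format arguments. That is why the Canary and HBaseFsck hunks rewrite LOG.error(e) as LOG.error(e.toString(), e) and wrap non-String values with Objects.toString(...). A minimal sketch of the call-site pattern follows; the class and method names are illustrative only and are not part of the patch.

import java.util.Objects;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class Slf4jCallSiteSketch {
  // Before the patch: private static final Log LOG = LogFactory.getLog(Slf4jCallSiteSketch.class);
  private static final Logger LOG = LoggerFactory.getLogger(Slf4jCallSiteSketch.class);

  void onFailure(Exception e, Object details) {
    // SLF4J has no error(Throwable) overload; supply a String message and pass the
    // Throwable separately so the stack trace is still logged.
    LOG.error(e.toString(), e);

    // Arbitrary objects must be converted explicitly; Objects.toString is null-safe.
    LOG.warn(Objects.toString(details));

    // SLF4J also supports parameterized messages, which avoids building the String
    // when the level is disabled.
    LOG.debug("Operation failed with {}", e.getClass().getSimpleName());
  }
}
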
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ZKDataMigrator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ZKDataMigrator.java index 11327e8baa0..b22b4ff40b9 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ZKDataMigrator.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ZKDataMigrator.java @@ -22,8 +22,6 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.TableState; import org.apache.hadoop.hbase.exceptions.DeserializationException; @@ -34,6 +32,8 @@ import org.apache.hadoop.hbase.zookeeper.ZKWatcher; import org.apache.hadoop.hbase.zookeeper.ZNodePaths; import org.apache.yetus.audience.InterfaceAudience; import org.apache.zookeeper.KeeperException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * utlity method to migrate zookeeper data across HBase versions. @@ -41,7 +41,7 @@ import org.apache.zookeeper.KeeperException; @InterfaceAudience.Private public class ZKDataMigrator { - private static final Log LOG = LogFactory.getLog(ZKDataMigrator.class); + private static final Logger LOG = LoggerFactory.getLogger(ZKDataMigrator.class); /** * Method for table states migration. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/hbck/HFileCorruptionChecker.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/hbck/HFileCorruptionChecker.java index 44bbb38ca5d..e937fa529aa 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/hbck/HFileCorruptionChecker.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/hbck/HFileCorruptionChecker.java @@ -31,9 +31,9 @@ import java.util.concurrent.ExecutorService; import java.util.concurrent.Future; import java.util.concurrent.atomic.AtomicInteger; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -60,7 +60,7 @@ import org.apache.hadoop.hbase.util.HBaseFsck.ErrorReporter; */ @InterfaceAudience.Private public class HFileCorruptionChecker { - private static final Log LOG = LogFactory.getLog(HFileCorruptionChecker.class); + private static final Logger LOG = LoggerFactory.getLogger(HFileCorruptionChecker.class); final Configuration conf; final FileSystem fs; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/hbck/OfflineMetaRepair.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/hbck/OfflineMetaRepair.java index c208d8aa3a8..534b948bfed 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/hbck/OfflineMetaRepair.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/hbck/OfflineMetaRepair.java @@ -20,10 +20,10 @@ package org.apache.hadoop.hbase.util.hbck; import java.io.IOException; import org.apache.commons.lang3.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceStability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HBaseConfiguration; @@ 
-46,7 +46,7 @@ import org.apache.hadoop.io.MultipleIOException; @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS) @InterfaceStability.Evolving public class OfflineMetaRepair { - private static final Log LOG = LogFactory.getLog(OfflineMetaRepair.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(OfflineMetaRepair.class.getName()); protected static void printUsageAndExit() { StringBuilder sb = new StringBuilder(); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/AbstractFSWALProvider.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/AbstractFSWALProvider.java index aba13c658fc..8bd9a3086ab 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/AbstractFSWALProvider.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/AbstractFSWALProvider.java @@ -25,8 +25,6 @@ import java.util.List; import java.util.concurrent.atomic.AtomicBoolean; import java.util.regex.Pattern; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FileSystem; @@ -35,6 +33,8 @@ import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.ServerName; import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceStability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.regionserver.wal.AbstractFSWAL; import org.apache.hadoop.hbase.regionserver.wal.WALActionsListener; import org.apache.hadoop.hbase.util.CancelableProgressable; @@ -56,7 +56,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe @InterfaceStability.Evolving public abstract class AbstractFSWALProvider> implements WALProvider { - private static final Log LOG = LogFactory.getLog(AbstractFSWALProvider.class); + private static final Logger LOG = LoggerFactory.getLogger(AbstractFSWALProvider.class); /** Separate old log into different dir by regionserver name **/ public static final String SEPARATE_OLDLOGDIR = "hbase.separate.oldlogdir.by.regionserver"; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/AsyncFSWALProvider.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/AsyncFSWALProvider.java index 5cb01899dba..8bb1802e6b3 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/AsyncFSWALProvider.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/AsyncFSWALProvider.java @@ -19,8 +19,6 @@ package org.apache.hadoop.hbase.wal; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -31,7 +29,8 @@ import org.apache.hadoop.hbase.util.CommonFSUtils.StreamLacksCapabilityException import org.apache.hadoop.hbase.util.Pair; import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceStability; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.base.Throwables; import org.apache.hadoop.hbase.shaded.io.netty.channel.Channel; import org.apache.hadoop.hbase.shaded.io.netty.channel.EventLoopGroup; @@ -46,7 +45,7 @@ import org.apache.hadoop.hbase.shaded.io.netty.util.concurrent.DefaultThreadFact @InterfaceStability.Evolving public class AsyncFSWALProvider extends AbstractFSWALProvider { 
- private static final Log LOG = LogFactory.getLog(AsyncFSWALProvider.class); + private static final Logger LOG = LoggerFactory.getLogger(AsyncFSWALProvider.class); // Only public so classes back in regionserver.wal can access public interface AsyncWriter extends WALProvider.AsyncWriter { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/DisabledWALProvider.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/DisabledWALProvider.java index cedf3509f5e..280d95fec30 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/DisabledWALProvider.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/DisabledWALProvider.java @@ -26,8 +26,6 @@ import java.util.Set; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.atomic.AtomicBoolean; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; @@ -38,6 +36,8 @@ import org.apache.hadoop.hbase.regionserver.wal.WALActionsListener; import org.apache.hadoop.hbase.regionserver.wal.WALCoprocessorHost; import org.apache.hadoop.hbase.util.FSUtils; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; // imports for things that haven't moved from regionserver.wal yet. @@ -50,7 +50,7 @@ import org.apache.yetus.audience.InterfaceAudience; @InterfaceAudience.Private class DisabledWALProvider implements WALProvider { - private static final Log LOG = LogFactory.getLog(DisabledWALProvider.class); + private static final Logger LOG = LoggerFactory.getLogger(DisabledWALProvider.class); WAL disabled; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/FSHLogProvider.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/FSHLogProvider.java index b72e66841e7..14505a8a9ce 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/FSHLogProvider.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/FSHLogProvider.java @@ -20,14 +20,14 @@ package org.apache.hadoop.hbase.wal; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HConstants; import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceStability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; // imports for things that haven't moved from regionserver.wal yet. 
import org.apache.hadoop.hbase.regionserver.wal.FSHLog; import org.apache.hadoop.hbase.regionserver.wal.ProtobufLogWriter; @@ -40,7 +40,7 @@ import org.apache.hadoop.hbase.util.CommonFSUtils; @InterfaceStability.Evolving public class FSHLogProvider extends AbstractFSWALProvider { - private static final Log LOG = LogFactory.getLog(FSHLogProvider.class); + private static final Logger LOG = LoggerFactory.getLogger(FSHLogProvider.class); // Only public so classes back in regionserver.wal can access public interface Writer extends WALProvider.Writer { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/RegionGroupingProvider.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/RegionGroupingProvider.java index ab3a7d94198..b8c9484ab3b 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/RegionGroupingProvider.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/RegionGroupingProvider.java @@ -28,10 +28,10 @@ import java.util.List; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; // imports for classes still in regionserver.wal import org.apache.hadoop.hbase.regionserver.wal.WALActionsListener; import org.apache.hadoop.hbase.util.Bytes; @@ -55,7 +55,7 @@ import org.apache.hadoop.hbase.util.IdLock; */ @InterfaceAudience.Private public class RegionGroupingProvider implements WALProvider { - private static final Log LOG = LogFactory.getLog(RegionGroupingProvider.class); + private static final Logger LOG = LoggerFactory.getLogger(RegionGroupingProvider.class); /** * Map identifiers to a group number. 
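
Both logger-naming forms kept by this patch are equivalent under SLF4J: LoggerFactory.getLogger(Class) is shorthand for looking up the logger named by the class's fully qualified name, so declarations such as the BloomFilterFactory and OfflineMetaRepair ones above, which retain the older .class.getName() style, resolve to the same named logger. A tiny illustration with a hypothetical class name:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LoggerNamingSketch {
  // Both declarations resolve to the logger named after this class's FQCN;
  // getLogger(Class) simply delegates to getLogger(clazz.getName()).
  private static final Logger BY_CLASS = LoggerFactory.getLogger(LoggerNamingSketch.class);
  private static final Logger BY_NAME = LoggerFactory.getLogger(LoggerNamingSketch.class.getName());
}
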
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALEdit.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALEdit.java index f5b611bae83..c909e905dac 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALEdit.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALEdit.java @@ -21,8 +21,6 @@ package org.apache.hadoop.hbase.wal; import java.io.IOException; import java.util.ArrayList; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HBaseInterfaceAudience; @@ -35,7 +33,8 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.ClassSize; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos; import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor; @@ -55,7 +54,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDe @InterfaceAudience.LimitedPrivate({ HBaseInterfaceAudience.REPLICATION, HBaseInterfaceAudience.COPROC }) public class WALEdit implements HeapSize { - private static final Log LOG = LogFactory.getLog(WALEdit.class); + private static final Logger LOG = LoggerFactory.getLogger(WALEdit.class); // TODO: Get rid of this; see HBASE-8457 public static final byte [] METAFAMILY = Bytes.toBytes("METAFAMILY"); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALFactory.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALFactory.java index 5855419683a..0628f8652f8 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALFactory.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALFactory.java @@ -29,12 +29,12 @@ import java.util.List; import java.util.OptionalLong; import java.util.concurrent.atomic.AtomicReference; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; // imports for things that haven't moved from regionserver.wal yet. import org.apache.hadoop.hbase.regionserver.wal.MetricsWAL; import org.apache.hadoop.hbase.regionserver.wal.ProtobufLogReader; @@ -67,7 +67,7 @@ import org.apache.hadoop.hbase.wal.WALProvider.Writer; @InterfaceAudience.Private public class WALFactory implements WALFileLengthProvider { - private static final Log LOG = LogFactory.getLog(WALFactory.class); + private static final Logger LOG = LoggerFactory.getLogger(WALFactory.class); /** * Maps between configuration names for providers and implementation classes. 
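
SLF4J defines no FATAL level, so the WALSplitter hunk below, like the HBaseFsck and HBaseTestCase hunks elsewhere in this patch, rewrites LOG.fatal(...) as LOG.error(...) tagged with the HBaseMarkers.FATAL marker, letting the logging backend still single out these events. A minimal sketch of the marker-based call; the plain MarkerFactory constant here stands in for HBaseMarkers.FATAL, whose exact definition is assumed rather than shown in this diff.

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.Marker;
import org.slf4j.MarkerFactory;

public class FatalMarkerSketch {
  // Assumed equivalent of HBaseMarkers.FATAL: a named marker the backend can filter on.
  private static final Marker FATAL = MarkerFactory.getMarker("FATAL");
  private static final Logger LOG = LoggerFactory.getLogger(FatalMarkerSketch.class);

  void abortingError(Exception e) {
    // Before the patch: LOG.fatal("Problem encountered while writing, aborting", e);
    LOG.error(FATAL, "Problem encountered while writing, aborting", e);
  }
}
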
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java index ce1713a127e..18ea7d72517 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java @@ -52,8 +52,6 @@ import java.util.concurrent.atomic.AtomicReference; import java.util.regex.Matcher; import java.util.regex.Pattern; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileAlreadyExistsException; import org.apache.hadoop.fs.FileStatus; @@ -71,6 +69,7 @@ import org.apache.hadoop.hbase.client.Durability; import org.apache.hadoop.hbase.client.Mutation; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.io.HeapSize; +import org.apache.hadoop.hbase.log.HBaseMarkers; import org.apache.hadoop.hbase.master.SplitLogManager; import org.apache.hadoop.hbase.monitoring.MonitoredTask; import org.apache.hadoop.hbase.monitoring.TaskMonitor; @@ -97,6 +96,8 @@ import org.apache.hadoop.hbase.zookeeper.ZKSplitLog; import org.apache.hadoop.io.MultipleIOException; import org.apache.hadoop.ipc.RemoteException; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This class is responsible for splitting up a bunch of regionserver commit log @@ -105,7 +106,7 @@ import org.apache.yetus.audience.InterfaceAudience; */ @InterfaceAudience.Private public class WALSplitter { - private static final Log LOG = LogFactory.getLog(WALSplitter.class); + private static final Logger LOG = LoggerFactory.getLogger(WALSplitter.class); /** By default we retry errors in splitting, rather than skipping. */ public static final boolean SPLIT_SKIP_ERRORS_DEFAULT = false; @@ -1538,7 +1539,7 @@ public class WALSplitter { } catch (IOException e) { e = e instanceof RemoteException ? 
((RemoteException)e).unwrapRemoteException() : e; - LOG.fatal(" Got while writing log entry to log", e); + LOG.error(HBaseMarkers.FATAL, " Got while writing log entry to log", e); throw e; } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/AcidGuaranteesTestTool.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/AcidGuaranteesTestTool.java index 6a214385e60..37f3279daca 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/AcidGuaranteesTestTool.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/AcidGuaranteesTestTool.java @@ -28,8 +28,6 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; import java.util.stream.Stream; import org.apache.commons.cli.CommandLine; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.MultithreadedTestUtil.RepeatingTestThread; import org.apache.hadoop.hbase.MultithreadedTestUtil.TestContext; @@ -51,7 +49,8 @@ import org.apache.hadoop.hbase.util.Threads; import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.util.ToolRunner; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; /** @@ -61,7 +60,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; @InterfaceAudience.Private public class AcidGuaranteesTestTool extends AbstractHBaseTool { - private static final Log LOG = LogFactory.getLog(AcidGuaranteesTestTool.class); + private static final Logger LOG = LoggerFactory.getLogger(AcidGuaranteesTestTool.class); public static final TableName TABLE_NAME = TableName.valueOf("TestAcidGuarantees"); public static final byte[] FAMILY_A = Bytes.toBytes("A"); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/GenericTestUtils.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/GenericTestUtils.java index 08565e07af4..8ee8be697ea 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/GenericTestUtils.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/GenericTestUtils.java @@ -19,7 +19,6 @@ package org.apache.hadoop.hbase; import java.io.File; import java.io.IOException; -import java.io.StringWriter; import java.lang.management.ManagementFactory; import java.lang.management.ThreadInfo; import java.lang.management.ThreadMXBean; @@ -32,13 +31,9 @@ import java.util.concurrent.TimeoutException; import java.util.concurrent.atomic.AtomicInteger; import java.util.regex.Pattern; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.impl.Log4JLogger; import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.util.Time; -import org.apache.log4j.Layout; import org.apache.log4j.Logger; -import org.apache.log4j.WriterAppender; import org.junit.Assert; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; @@ -117,42 +112,12 @@ public abstract class GenericTestUtils { TimedOutTestsListener.buildThreadDiagnosticString()); } - public static class LogCapturer { - private StringWriter sw = new StringWriter(); - private WriterAppender appender; - private Logger logger; - - public static LogCapturer captureLogs(Log l) { - Logger logger = ((Log4JLogger)l).getLogger(); - LogCapturer c = new LogCapturer(logger); - return c; - } - - - private LogCapturer(Logger logger) { - this.logger = logger; - Layout layout = 
Logger.getRootLogger().getAppender("stdout").getLayout(); - WriterAppender wa = new WriterAppender(layout, sw); - logger.addAppender(wa); - } - - public String getOutput() { - return sw.toString(); - } - - public void stopCapturing() { - logger.removeAppender(appender); - - } - } - - /** * Mockito answer helper that triggers one latch as soon as the * method is called, then waits on another before continuing. */ public static class DelayAnswer implements Answer { - private final Log LOG; + private final Logger LOG; private final CountDownLatch fireLatch = new CountDownLatch(1); private final CountDownLatch waitLatch = new CountDownLatch(1); @@ -165,7 +130,7 @@ public abstract class GenericTestUtils { private volatile Throwable thrown; private volatile Object returnValue; - public DelayAnswer(Log log) { + public DelayAnswer(Logger log) { this.LOG = log; } @@ -262,13 +227,13 @@ public abstract class GenericTestUtils { */ public static class DelegateAnswer implements Answer { private final Object delegate; - private final Log log; + private final Logger log; public DelegateAnswer(Object delegate) { this(null, delegate); } - public DelegateAnswer(Log log, Object delegate) { + public DelegateAnswer(Logger log, Object delegate) { this.log = log; this.delegate = delegate; } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseCluster.java index cac957bc88a..58ae0590913 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseCluster.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseCluster.java @@ -20,9 +20,9 @@ package org.apache.hadoop.hbase; import java.io.Closeable; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService; @@ -57,7 +57,7 @@ import org.apache.hadoop.hbase.util.Threads; @InterfaceAudience.Private public abstract class HBaseCluster implements Closeable, Configurable { // Log is being used in DistributedHBaseCluster class, hence keeping it as package scope - static final Log LOG = LogFactory.getLog(HBaseCluster.class.getName()); + static final Logger LOG = LoggerFactory.getLogger(HBaseCluster.class.getName()); protected Configuration conf; /** the status of the cluster before we begin */ diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java index 32cffc0887a..92581b8fb63 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java @@ -21,8 +21,6 @@ package org.apache.hadoop.hbase; import java.io.IOException; import java.util.NavigableMap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -31,6 +29,7 @@ import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.Table; +import org.apache.hadoop.hbase.log.HBaseMarkers; import org.apache.hadoop.hbase.regionserver.HRegion; 
import org.apache.hadoop.hbase.regionserver.Region; import org.apache.hadoop.hbase.regionserver.RegionAsTable; @@ -38,6 +37,8 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.FSTableDescriptors; import org.apache.hadoop.hbase.util.FSUtils; import org.apache.hadoop.hdfs.MiniDFSCluster; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import junit.framework.AssertionFailedError; import junit.framework.TestCase; @@ -49,7 +50,7 @@ import junit.framework.TestCase; */ @Deprecated public abstract class HBaseTestCase extends TestCase { - private static final Log LOG = LogFactory.getLog(HBaseTestCase.class); + private static final Logger LOG = LoggerFactory.getLogger(HBaseTestCase.class); protected final static byte [] fam1 = Bytes.toBytes("colfamily11"); protected final static byte [] fam2 = Bytes.toBytes("colfamily21"); @@ -115,7 +116,7 @@ public abstract class HBaseTestCase extends TestCase { testDir = FSUtils.getRootDir(conf); } } catch (Exception e) { - LOG.fatal("error during setup", e); + LOG.error(HBaseMarkers.FATAL, "error during setup", e); throw e; } } @@ -129,7 +130,7 @@ public abstract class HBaseTestCase extends TestCase { } } } catch (Exception e) { - LOG.fatal("error during tear down", e); + LOG.error(HBaseMarkers.FATAL, "error during tear down", e); } super.tearDown(); } @@ -284,7 +285,7 @@ public abstract class HBaseTestCase extends TestCase { * @throws IOException */ public static long addContent(final Table updater, - final String columnFamily, + final String columnFamily, final String column, final byte [] startKeyBytes, final byte [] endKey, final long ts) throws IOException { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java index c30245a1820..853e96531f2 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java @@ -55,8 +55,6 @@ import java.util.stream.Collectors; import org.apache.commons.io.FileUtils; import org.apache.commons.lang3.RandomStringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.impl.Jdk14Logger; import org.apache.commons.logging.impl.Log4JLogger; import org.apache.hadoop.conf.Configuration; @@ -145,11 +143,14 @@ import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.mapred.MiniMRCluster; import org.apache.hadoop.mapred.TaskLog; import org.apache.hadoop.minikdc.MiniKdc; +import org.apache.log4j.LogManager; import org.apache.yetus.audience.InterfaceAudience; import org.apache.zookeeper.WatchedEvent; import org.apache.zookeeper.ZooKeeper; import org.apache.zookeeper.ZooKeeper.States; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.slf4j.impl.Log4jLoggerAdapter; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; /** @@ -2600,9 +2601,11 @@ public class HBaseTestingUtility extends HBaseZKTestingUtility { * @param clazz The class for which to switch to debug logging. 
*/ public void enableDebug(Class clazz) { - Log l = LogFactory.getLog(clazz); + Logger l = LoggerFactory.getLogger(clazz); if (l instanceof Log4JLogger) { ((Log4JLogger) l).getLogger().setLevel(org.apache.log4j.Level.DEBUG); + } else if (l instanceof Log4jLoggerAdapter) { + LogManager.getLogger(clazz).setLevel(org.apache.log4j.Level.DEBUG); } else if (l instanceof Jdk14Logger) { ((Jdk14Logger) l).getLogger().setLevel(java.util.logging.Level.ALL); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/HFilePerformanceEvaluation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/HFilePerformanceEvaluation.java index b8a86c6d007..3ee6f7d6d4c 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/HFilePerformanceEvaluation.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/HFilePerformanceEvaluation.java @@ -22,14 +22,14 @@ import java.io.IOException; import java.security.SecureRandom; import java.util.Random; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.commons.math3.random.RandomData; import org.apache.commons.math3.random.RandomDataImpl; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; import org.apache.hadoop.hbase.io.crypto.CryptoCipherProvider; import org.apache.hadoop.hbase.io.crypto.DefaultCipherProvider; @@ -62,8 +62,8 @@ public class HFilePerformanceEvaluation { "WARN"); } - private static final Log LOG = - LogFactory.getLog(HFilePerformanceEvaluation.class.getName()); + private static final Logger LOG = + LoggerFactory.getLogger(HFilePerformanceEvaluation.class.getName()); static byte [] format(final int i) { String v = Integer.toString(i); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/MiniHBaseCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/MiniHBaseCluster.java index 3f851813084..ad58124f5ac 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/MiniHBaseCluster.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/MiniHBaseCluster.java @@ -25,9 +25,10 @@ import java.util.HashSet; import java.util.List; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; + import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.hbase.master.HMaster; @@ -53,7 +54,7 @@ import org.apache.hadoop.hbase.util.Threads; */ @InterfaceAudience.Public public class MiniHBaseCluster extends HBaseCluster { - private static final Log LOG = LogFactory.getLog(MiniHBaseCluster.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(MiniHBaseCluster.class.getName()); public LocalHBaseCluster hbaseCluster; private static int index; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/MockRegionServerServices.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/MockRegionServerServices.java index c05830f6b44..f1e020f581d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/MockRegionServerServices.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/MockRegionServerServices.java @@ -26,8 +26,6 @@ import java.util.Map; import 
java.util.concurrent.ConcurrentSkipListMap; import java.util.concurrent.atomic.AtomicBoolean; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.hbase.client.ClusterConnection; @@ -56,7 +54,8 @@ import org.apache.hadoop.hbase.wal.WAL; import org.apache.hadoop.hbase.zookeeper.MetaTableLocator; import org.apache.hadoop.hbase.zookeeper.ZKWatcher; import org.apache.zookeeper.KeeperException; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos; import com.google.protobuf.Service; @@ -65,7 +64,7 @@ import com.google.protobuf.Service; * Basic mock region server services. Should only be instantiated by HBaseTestingUtility.b */ public class MockRegionServerServices implements RegionServerServices { - protected static final Log LOG = LogFactory.getLog(MockRegionServerServices.class); + protected static final Logger LOG = LoggerFactory.getLogger(MockRegionServerServices.class); private final Map regions = new HashMap<>(); private final ConcurrentSkipListMap rit = new ConcurrentSkipListMap<>(Bytes.BYTES_COMPARATOR); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/MultithreadedTestUtil.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/MultithreadedTestUtil.java index cf07b42a129..1d8de45e6d5 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/MultithreadedTestUtil.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/MultithreadedTestUtil.java @@ -25,14 +25,14 @@ import java.util.Set; import java.util.concurrent.ExecutionException; import java.util.concurrent.Future; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public abstract class MultithreadedTestUtil { - private static final Log LOG = - LogFactory.getLog(MultithreadedTestUtil.class); + private static final Logger LOG = + LoggerFactory.getLogger(MultithreadedTestUtil.class); public static class TestContext { private final Configuration conf; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluationCommons.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluationCommons.java index e2350e81185..f919db787d9 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluationCommons.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluationCommons.java @@ -22,16 +22,16 @@ import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Code shared by PE tests. 
*/ public class PerformanceEvaluationCommons { - private static final Log LOG = - LogFactory.getLog(PerformanceEvaluationCommons.class.getName()); + private static final Logger LOG = + LoggerFactory.getLogger(PerformanceEvaluationCommons.class.getName()); public static void assertValueSize(final int expectedSize, final int got) { if (got != expectedSize) { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestGlobalMemStoreSize.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestGlobalMemStoreSize.java index 34e8c3c00d8..a690987c4f5 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestGlobalMemStoreSize.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestGlobalMemStoreSize.java @@ -25,8 +25,6 @@ import java.io.IOException; import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.client.RegionInfo; import org.apache.hadoop.hbase.client.RegionLocator; @@ -42,7 +40,8 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; /** @@ -51,7 +50,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; */ @Category({MiscTests.class, MediumTests.class}) public class TestGlobalMemStoreSize { - private static final Log LOG = LogFactory.getLog(TestGlobalMemStoreSize.class); + private static final Logger LOG = LoggerFactory.getLogger(TestGlobalMemStoreSize.class); private static int regionServerNum = 4; private static int regionNum = 16; // total region num = region num + root and meta regions diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHBaseTestingUtility.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHBaseTestingUtility.java index 1d752d2af55..cf54b8f875c 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHBaseTestingUtility.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHBaseTestingUtility.java @@ -35,8 +35,6 @@ import java.util.Map; import java.util.Map.Entry; import java.util.Random; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileUtil; @@ -58,13 +56,15 @@ import org.junit.rules.TestName; import org.mockito.Mockito; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Test our testing utility class */ @Category({MiscTests.class, LargeTests.class}) public class TestHBaseTestingUtility { - private static final Log LOG = LogFactory.getLog(TestHBaseTestingUtility.class); + private static final Logger LOG = LoggerFactory.getLogger(TestHBaseTestingUtility.class); @Rule public TestName name = new TestName(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestIOFencing.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestIOFencing.java index 643940cdd19..69687947351 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestIOFencing.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestIOFencing.java @@ -25,8 +25,6 @@ import java.util.Collection; import java.util.List; import java.util.concurrent.CountDownLatch; -import 
org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -55,7 +53,8 @@ import org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread; import org.apache.hadoop.hbase.wal.WAL; import org.junit.Test; import org.junit.experimental.categories.Category; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor; @@ -81,7 +80,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDes */ @Category({MiscTests.class, LargeTests.class}) public class TestIOFencing { - private static final Log LOG = LogFactory.getLog(TestIOFencing.class); + private static final Logger LOG = LoggerFactory.getLogger(TestIOFencing.class); static { // Uncomment the following lines if more verbosity is needed for // debugging (see HBASE-12285 for details). diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestIPv6NIOServerSocketChannel.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestIPv6NIOServerSocketChannel.java index e63eaf24e3b..ceb9f562572 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestIPv6NIOServerSocketChannel.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestIPv6NIOServerSocketChannel.java @@ -26,8 +26,6 @@ import java.net.ServerSocket; import java.nio.channels.ServerSocketChannel; import java.util.Locale; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.junit.Assert; @@ -35,6 +33,8 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestRule; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This tests whether ServerSocketChannel works over ipv6, which ZooKeeper @@ -49,7 +49,7 @@ import org.junit.rules.TestRule; */ @Category({MiscTests.class, SmallTests.class}) public class TestIPv6NIOServerSocketChannel { - private static final Log LOG = LogFactory.getLog(TestIPv6NIOServerSocketChannel.class); + private static final Logger LOG = LoggerFactory.getLogger(TestIPv6NIOServerSocketChannel.class); @Rule public final TestRule timeout = CategoryBasedTimeout.builder(). 
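The hunks above and below repeat the same three mechanical substitutions, so the resulting idiom is worth seeing once in plain Java. The sketch below is illustrative only: ExampleService is a made-up class, not a file touched by this patch, while org.slf4j.Logger/LoggerFactory and org.apache.hadoop.hbase.log.HBaseMarkers are the real APIs the patch switches to.

import org.apache.hadoop.hbase.log.HBaseMarkers;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class ExampleService { // illustrative class, not part of the patch
  // 1. LogFactory.getLog(Foo.class) becomes LoggerFactory.getLogger(Foo.class).
  private static final Logger LOG = LoggerFactory.getLogger(ExampleService.class);

  void run() {
    try {
      // ... work that may throw ...
    } catch (Exception e) {
      // 2. SLF4J has no warn(Object)/info(Object) overloads, so a bare
      //    LOG.warn(e) becomes a message string plus the Throwable.
      LOG.warn(e.toString(), e);
      // 3. SLF4J has no fatal() level; LOG.fatal(msg, e) becomes error()
      //    tagged with the FATAL marker from HBaseMarkers.
      LOG.error(HBaseMarkers.FATAL, "unrecoverable failure", e);
    }
  }
}

Because SLF4J deliberately omits a FATAL level, the marker is what lets downstream log configuration keep treating these events as fatal rather than as ordinary errors.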
@@ -103,7 +103,7 @@ public class TestIPv6NIOServerSocketChannel { if (channel != null) { channel.close(); } - } + } } } @@ -126,8 +126,7 @@ public class TestIPv6NIOServerSocketChannel { //or java.net.SocketException: Protocol family not supported Assert.assertFalse(ex instanceof BindException); Assert.assertTrue(ex.getMessage().toLowerCase(Locale.ROOT).contains("protocol family")); - LOG.info("Received expected exception:"); - LOG.info(ex); + LOG.info("Received expected exception:", ex); //if this is the case, ensure that we are running on preferIPv4=true ensurePreferIPv4(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestInfoServers.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestInfoServers.java index c5a817ba736..f3d31154965 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestInfoServers.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestInfoServers.java @@ -27,8 +27,6 @@ import java.io.InputStream; import java.net.URL; import org.apache.commons.io.IOUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.testclassification.MiscTests; @@ -39,6 +37,8 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Testing, info servers are disabled. This test enables then and checks that @@ -46,7 +46,7 @@ import org.junit.rules.TestName; */ @Category({MiscTests.class, MediumTests.class}) public class TestInfoServers { - private static final Log LOG = LogFactory.getLog(TestInfoServers.class); + private static final Logger LOG = LoggerFactory.getLogger(TestInfoServers.class); private final static HBaseTestingUtility UTIL = new HBaseTestingUtility(); @Rule diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestJMXConnectorServer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestJMXConnectorServer.java index a5cf1a84234..444db644e25 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestJMXConnectorServer.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestJMXConnectorServer.java @@ -24,8 +24,6 @@ import javax.management.remote.JMXConnector; import javax.management.remote.JMXConnectorFactory; import javax.naming.ServiceUnavailableException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.coprocessor.CoprocessorHost; @@ -41,13 +39,15 @@ import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Test case for JMX Connector Server. 
*/ @Category({ MiscTests.class, MediumTests.class }) public class TestJMXConnectorServer { - private static final Log LOG = LogFactory.getLog(TestJMXConnectorServer.class); + private static final Logger LOG = LoggerFactory.getLogger(TestJMXConnectorServer.class); private static HBaseTestingUtility UTIL = new HBaseTestingUtility(); private static Configuration conf = null; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestJMXListener.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestJMXListener.java index 6c8f27ad1b1..520294a8c93 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestJMXListener.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestJMXListener.java @@ -24,8 +24,6 @@ import javax.management.MBeanServerConnection; import javax.management.remote.JMXConnector; import javax.management.remote.JMXConnectorFactory; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.coprocessor.CoprocessorHost; import org.apache.hadoop.hbase.testclassification.MediumTests; @@ -39,12 +37,14 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.ExpectedException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({MiscTests.class, MediumTests.class}) public class TestJMXListener { - private static final Log LOG = LogFactory.getLog(TestJMXListener.class); + private static final Logger LOG = LoggerFactory.getLogger(TestJMXListener.class); private static HBaseTestingUtility UTIL = new HBaseTestingUtility(); private static int connectorPort = 61120; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableAccessor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableAccessor.java index 15c0b0ca296..14057de0692 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableAccessor.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableAccessor.java @@ -34,8 +34,6 @@ import java.io.IOException; import java.util.List; import java.util.Random; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Connection; @@ -66,7 +64,8 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; /** @@ -75,7 +74,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; @Category({MiscTests.class, MediumTests.class}) @SuppressWarnings("deprecation") public class TestMetaTableAccessor { - private static final Log LOG = LogFactory.getLog(TestMetaTableAccessor.class); + private static final Logger LOG = LoggerFactory.getLogger(TestMetaTableAccessor.class); private static final HBaseTestingUtility UTIL = new HBaseTestingUtility(); private static Connection connection; private Random random = new Random(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableAccessorNoCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableAccessorNoCluster.java index 961677b10a2..7049a74581b 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableAccessorNoCluster.java +++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableAccessorNoCluster.java @@ -27,8 +27,6 @@ import java.util.ArrayList; import java.util.List; import java.util.NavigableMap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.client.ClusterConnection; import org.apache.hadoop.hbase.client.HConnectionTestingUtility; import org.apache.hadoop.hbase.client.RegionInfo; @@ -49,7 +47,8 @@ import org.junit.experimental.categories.Category; import org.mockito.Mockito; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController; import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; @@ -59,7 +58,7 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; */ @Category({MiscTests.class, MediumTests.class}) public class TestMetaTableAccessorNoCluster { - private static final Log LOG = LogFactory.getLog(TestMetaTableAccessorNoCluster.class); + private static final Logger LOG = LoggerFactory.getLogger(TestMetaTableAccessorNoCluster.class); private static final HBaseTestingUtility UTIL = new HBaseTestingUtility(); private static final Abortable ABORTABLE = new Abortable() { boolean aborted = false; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableLocator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableLocator.java index c2a49451d9b..33cac664239 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableLocator.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableLocator.java @@ -26,8 +26,6 @@ import static org.junit.Assert.assertFalse; import java.io.IOException; import java.net.ConnectException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.client.ClusterConnection; import org.apache.hadoop.hbase.client.HConnectionTestingUtility; @@ -53,7 +51,8 @@ import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; import org.mockito.Mockito; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController; import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; @@ -62,7 +61,7 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; */ @Category({MiscTests.class, MediumTests.class}) public class TestMetaTableLocator { - private static final Log LOG = LogFactory.getLog(TestMetaTableLocator.class); + private static final Logger LOG = LoggerFactory.getLogger(TestMetaTableLocator.class); private static final HBaseTestingUtility UTIL = new HBaseTestingUtility(); private static final ServerName SN = ServerName.valueOf("example.org", 1234, System.currentTimeMillis()); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMovedRegionsCleaner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMovedRegionsCleaner.java index 5fb16c79b09..bdb74a4b19f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMovedRegionsCleaner.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMovedRegionsCleaner.java @@ -19,8 +19,6 @@ package org.apache.hadoop.hbase; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import 
org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.testclassification.MiscTests; @@ -28,6 +26,8 @@ import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.io.IOException; @@ -37,7 +37,7 @@ import java.io.IOException; */ @Category({ MiscTests.class, MediumTests.class }) public class TestMovedRegionsCleaner { - private static final Log LOG = LogFactory.getLog(TestRegionRebalancing.class); + private static final Logger LOG = LoggerFactory.getLogger(TestRegionRebalancing.class); private final HBaseTestingUtility UTIL = new HBaseTestingUtility(); public static int numCalls = 0; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMultiVersions.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMultiVersions.java index 12605519f53..1a0215e5a94 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMultiVersions.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMultiVersions.java @@ -28,8 +28,6 @@ import java.util.ArrayList; import java.util.List; import java.util.NavigableMap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.TimestampTestBase.FlushCache; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Get; @@ -49,6 +47,8 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Port of old TestScanMultipleVersions, TestTimestamp and TestGetRowVersions @@ -56,7 +56,7 @@ import org.junit.rules.TestName; */ @Category({MiscTests.class, MediumTests.class}) public class TestMultiVersions { - private static final Log LOG = LogFactory.getLog(TestMultiVersions.class); + private static final Logger LOG = LoggerFactory.getLogger(TestMultiVersions.class); private static final HBaseTestingUtility UTIL = new HBaseTestingUtility(); private Admin admin; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestNamespace.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestNamespace.java index 78b9585b790..7b4c930af40 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestNamespace.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestNamespace.java @@ -28,8 +28,6 @@ import java.io.IOException; import java.util.Set; import java.util.concurrent.Callable; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.client.Admin; @@ -50,10 +48,12 @@ import org.junit.experimental.categories.Category; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({MiscTests.class, MediumTests.class}) public class TestNamespace { - private static final Log LOG = LogFactory.getLog(TestNamespace.class); + private static final Logger LOG = LoggerFactory.getLogger(TestNamespace.class); private static HMaster master; protected final static int NUM_SLAVES_BASE = 4; private static HBaseTestingUtility TEST_UTIL; @@ -130,7 +130,7 @@ public class TestNamespace { try { admin.createNamespace(NamespaceDescriptor.DEFAULT_NAMESPACE); } catch 
(IOException exp) { - LOG.warn(exp); + LOG.warn(exp.toString(), exp); exceptionCaught = true; } finally { assertTrue(exceptionCaught); @@ -140,7 +140,7 @@ public class TestNamespace { try { admin.createNamespace(NamespaceDescriptor.SYSTEM_NAMESPACE); } catch (IOException exp) { - LOG.warn(exp); + LOG.warn(exp.toString(), exp); exceptionCaught = true; } finally { assertTrue(exceptionCaught); @@ -153,7 +153,7 @@ public class TestNamespace { try { admin.deleteNamespace(NamespaceDescriptor.DEFAULT_NAMESPACE_NAME_STR); } catch (IOException exp) { - LOG.warn(exp); + LOG.warn(exp.toString(), exp); exceptionCaught = true; } finally { assertTrue(exceptionCaught); @@ -162,7 +162,7 @@ public class TestNamespace { try { admin.deleteNamespace(NamespaceDescriptor.SYSTEM_NAMESPACE_NAME_STR); } catch (IOException exp) { - LOG.warn(exp); + LOG.warn(exp.toString(), exp); exceptionCaught = true; } finally { assertTrue(exceptionCaught); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestNodeHealthCheckChore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestNodeHealthCheckChore.java index 9360b1f34e6..22daf490934 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestNodeHealthCheckChore.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestNodeHealthCheckChore.java @@ -27,8 +27,6 @@ import java.io.IOException; import java.io.PrintWriter; import java.util.UUID; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -39,11 +37,13 @@ import org.apache.hadoop.util.Shell; import org.junit.After; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({MiscTests.class, SmallTests.class}) public class TestNodeHealthCheckChore { - private static final Log LOG = LogFactory.getLog(TestNodeHealthCheckChore.class); + private static final Logger LOG = LoggerFactory.getLogger(TestNodeHealthCheckChore.class); private static final HBaseTestingUtility UTIL = new HBaseTestingUtility(); private static final int SCRIPT_TIMEOUT = 5000; private File healthScriptFile; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestPartialResultsFromClientSide.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestPartialResultsFromClientSide.java index 3b2b6fed907..12f43d48678 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestPartialResultsFromClientSide.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestPartialResultsFromClientSide.java @@ -30,8 +30,6 @@ import java.util.LinkedHashSet; import java.util.List; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.client.ClientScanner; import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.Put; @@ -56,6 +54,8 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * These tests are focused on testing how partial results appear to a client. 
Partial results are @@ -69,7 +69,7 @@ import org.junit.rules.TestName; */ @Category(MediumTests.class) public class TestPartialResultsFromClientSide { - private static final Log LOG = LogFactory.getLog(TestPartialResultsFromClientSide.class); + private static final Logger LOG = LoggerFactory.getLogger(TestPartialResultsFromClientSide.class); private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private final static int MINICLUSTER_SIZE = 5; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestRegionRebalancing.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestRegionRebalancing.java index 467aadacbd1..5f2898fadd9 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestRegionRebalancing.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestRegionRebalancing.java @@ -27,8 +27,6 @@ import java.util.Arrays; import java.util.Collection; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; @@ -47,7 +45,8 @@ import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import org.junit.runners.Parameterized.Parameters; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; /** @@ -68,7 +67,7 @@ public class TestRegionRebalancing { } private static final byte[] FAMILY_NAME = Bytes.toBytes("col"); - private static final Log LOG = LogFactory.getLog(TestRegionRebalancing.class); + private static final Logger LOG = LoggerFactory.getLogger(TestRegionRebalancing.class); private final HBaseTestingUtility UTIL = new HBaseTestingUtility(); private RegionLocator regionLocator; private HTableDescriptor desc; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestStochasticBalancerJmxMetrics.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestStochasticBalancerJmxMetrics.java index f987ea7ca1c..22ab8b9fe6c 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestStochasticBalancerJmxMetrics.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestStochasticBalancerJmxMetrics.java @@ -36,8 +36,6 @@ import java.util.Map; import java.util.Random; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.client.RegionInfo; import org.apache.hadoop.hbase.coprocessor.CoprocessorHost; @@ -54,12 +52,14 @@ import org.junit.Ignore; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.runners.MethodSorters; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({ MiscTests.class, MediumTests.class }) @FixMethodOrder(MethodSorters.NAME_ASCENDING) @Ignore public class TestStochasticBalancerJmxMetrics extends BalancerTestBase { - private static final Log LOG = LogFactory.getLog(TestStochasticBalancerJmxMetrics.class); + private static final Logger LOG = LoggerFactory.getLogger(TestStochasticBalancerJmxMetrics.class); private static HBaseTestingUtility UTIL = new HBaseTestingUtility(); private static int connectorPort = 61120; private static StochasticLoadBalancer loadBalancer; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestZooKeeper.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestZooKeeper.java index ed93b1750ac..b41e39907e1 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestZooKeeper.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestZooKeeper.java @@ -29,8 +29,6 @@ import java.io.IOException; import java.util.List; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Put; @@ -66,11 +64,13 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({MiscTests.class, LargeTests.class}) public class TestZooKeeper { - private static final Log LOG = LogFactory.getLog(TestZooKeeper.class); + private static final Logger LOG = LoggerFactory.getLogger(TestZooKeeper.class); private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestHFileArchiving.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestHFileArchiving.java index c95f7b33e65..95de9bcc677 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestHFileArchiving.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestHFileArchiving.java @@ -26,8 +26,6 @@ import java.util.ArrayList; import java.util.Collections; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -56,6 +54,8 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Test that the {@link HFileArchiver} correctly removes all the parts of a region when cleaning up @@ -64,7 +64,7 @@ import org.junit.rules.TestName; @Category({MediumTests.class, MiscTests.class}) public class TestHFileArchiving { - private static final Log LOG = LogFactory.getLog(TestHFileArchiving.class); + private static final Logger LOG = LoggerFactory.getLogger(TestHFileArchiving.class); private static final HBaseTestingUtility UTIL = new HBaseTestingUtility(); private static final byte[] TEST_FAM = Bytes.toBytes("fam"); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/example/TestZooKeeperTableArchiveClient.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/example/TestZooKeeperTableArchiveClient.java index 1c2279cc3c8..4eb1ae4973b 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/example/TestZooKeeperTableArchiveClient.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/example/TestZooKeeperTableArchiveClient.java @@ -27,8 +27,6 @@ import java.util.ArrayList; import java.util.List; import java.util.concurrent.CountDownLatch; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -65,6 +63,8 @@ import org.junit.experimental.categories.Category; import org.mockito.Mockito; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; +import org.slf4j.Logger; 
+import org.slf4j.LoggerFactory; /** * Spin up a small cluster and check that the hfiles of region are properly long-term archived as @@ -73,7 +73,7 @@ import org.mockito.stubbing.Answer; @Category({MiscTests.class, MediumTests.class}) public class TestZooKeeperTableArchiveClient { - private static final Log LOG = LogFactory.getLog(TestZooKeeperTableArchiveClient.class); + private static final Logger LOG = LoggerFactory.getLogger(TestZooKeeperTableArchiveClient.class); private static final HBaseTestingUtility UTIL = HBaseTestingUtility.createLocalHTU(); private static final String STRING_TABLE_NAME = "test"; private static final byte[] TEST_FAM = Bytes.toBytes("fam"); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin1.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin1.java index 0b7da280c37..7e5ccf3de47 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin1.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin1.java @@ -33,8 +33,6 @@ import java.util.Map; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HConstants; @@ -65,7 +63,8 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MergeTableRegionsRequest; @@ -76,7 +75,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MergeTable */ @Category({LargeTests.class, ClientTests.class}) public class TestAdmin1 { - private static final Log LOG = LogFactory.getLog(TestAdmin1.class); + private static final Logger LOG = LoggerFactory.getLogger(TestAdmin1.class); private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private Admin admin; @@ -1253,7 +1252,7 @@ public class TestAdmin1 { try { this.admin.deleteColumnFamily(tableName, Bytes.toBytes("col2")); } catch (TableNotDisabledException e) { - LOG.info(e); + LOG.info(e.toString(), e); } this.admin.disableTable(tableName); this.admin.deleteTable(tableName); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin2.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin2.java index 953fae0876c..fb5febc4ba0 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin2.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin2.java @@ -37,8 +37,6 @@ import java.util.concurrent.TimeoutException; import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.ClusterStatus.Option; import org.apache.hadoop.hbase.HBaseTestingUtility; @@ -75,7 +73,8 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; @@ -86,7 +85,7 @@ import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; */ @Category({LargeTests.class, ClientTests.class}) public class TestAdmin2 { - private static final Log LOG = LogFactory.getLog(TestAdmin2.class); + private static final Logger LOG = LoggerFactory.getLogger(TestAdmin2.class); private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private Admin admin; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncAdminBase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncAdminBase.java index 83ba24411b7..525fa4c93f3 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncAdminBase.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncAdminBase.java @@ -27,8 +27,6 @@ import java.util.function.Supplier; import java.util.regex.Pattern; import org.apache.commons.io.IOUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.TableName; @@ -41,13 +39,15 @@ import org.junit.Rule; import org.junit.rules.TestName; import org.junit.runners.Parameterized.Parameter; import org.junit.runners.Parameterized.Parameters; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Class to test AsyncAdmin. */ public abstract class TestAsyncAdminBase { - protected static final Log LOG = LogFactory.getLog(TestAsyncAdminBase.class); + protected static final Logger LOG = LoggerFactory.getLogger(TestAsyncAdminBase.class); protected final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); protected static final byte[] FAMILY = Bytes.toBytes("testFamily"); protected static final byte[] FAMILY_0 = Bytes.toBytes("cf0"); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncAdminBuilder.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncAdminBuilder.java index 05324aa8ef4..8b3b18166e0 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncAdminBuilder.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncAdminBuilder.java @@ -31,8 +31,6 @@ import java.util.concurrent.atomic.AtomicLong; import java.util.function.Supplier; import org.apache.commons.io.IOUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.coprocessor.CoprocessorHost; @@ -51,12 +49,14 @@ import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import org.junit.runners.Parameterized.Parameter; import org.junit.runners.Parameterized.Parameters; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @RunWith(Parameterized.class) @Category({ LargeTests.class, ClientTests.class }) public class TestAsyncAdminBuilder { - private static final Log LOG = LogFactory.getLog(TestAsyncAdminBuilder.class); + private static final Logger LOG = LoggerFactory.getLogger(TestAsyncAdminBuilder.class); private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private static AsyncConnection ASYNC_CONN; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncNamespaceAdminApi.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncNamespaceAdminApi.java index dd3655ebae5..ada8824ab8d 100644 --- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncNamespaceAdminApi.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncNamespaceAdminApi.java @@ -64,7 +64,7 @@ public class TestAsyncNamespaceAdminApi extends TestAsyncAdminBase { TEST_UTIL.getConfiguration().setInt(START_LOG_ERRORS_AFTER_COUNT_KEY, 0); TEST_UTIL.startMiniCluster(1); ASYNC_CONN = ConnectionFactory.createAsyncConnection(TEST_UTIL.getConfiguration()).get(); - master = ((MiniHBaseCluster) TEST_UTIL.getHBaseCluster()).getMaster(); + master = TEST_UTIL.getHBaseCluster().getMaster(); zkNamespaceManager = new ZKNamespaceManager(master.getZooKeeper()); zkNamespaceManager.start(); LOG.info("Done initializing cluster"); @@ -98,7 +98,7 @@ public class TestAsyncNamespaceAdminApi extends TestAsyncAdminBase { try { admin.deleteNamespace(NamespaceDescriptor.DEFAULT_NAMESPACE_NAME_STR).join(); } catch (Exception exp) { - LOG.warn(exp); + LOG.warn(exp.toString(), exp); exceptionCaught = true; } finally { assertTrue(exceptionCaught); @@ -107,7 +107,7 @@ public class TestAsyncNamespaceAdminApi extends TestAsyncAdminBase { try { admin.deleteNamespace(NamespaceDescriptor.SYSTEM_NAMESPACE_NAME_STR).join(); } catch (Exception exp) { - LOG.warn(exp); + LOG.warn(exp.toString(), exp); exceptionCaught = true; } finally { assertTrue(exceptionCaught); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncQuotaAdminApi.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncQuotaAdminApi.java index c39a582c105..a3b160fb202 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncQuotaAdminApi.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncQuotaAdminApi.java @@ -18,6 +18,13 @@ package org.apache.hadoop.hbase.client; +import static org.apache.hadoop.hbase.client.AsyncProcess.START_LOG_ERRORS_AFTER_COUNT_KEY; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.fail; + +import java.util.Objects; +import java.util.concurrent.TimeUnit; + import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.quotas.QuotaCache; @@ -36,12 +43,6 @@ import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; -import java.util.concurrent.TimeUnit; - -import static org.apache.hadoop.hbase.client.AsyncProcess.START_LOG_ERRORS_AFTER_COUNT_KEY; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.fail; - @RunWith(Parameterized.class) @Category({ ClientTests.class, MediumTests.class }) public class TestAsyncQuotaAdminApi extends TestAsyncAdminBase { @@ -180,7 +181,7 @@ public class TestAsyncQuotaAdminApi extends TestAsyncAdminBase { private int countResults(final QuotaFilter filter) throws Exception { int count = 0; for (QuotaSettings settings : admin.getQuota(filter).get()) { - LOG.debug(settings); + LOG.debug(Objects.toString(settings)); count++; } return count; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncRegionAdminApi.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncRegionAdminApi.java index e6cffd6333f..dcccfd168b5 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncRegionAdminApi.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncRegionAdminApi.java @@ -418,7 +418,7 @@ public class TestAsyncRegionAdminApi extends TestAsyncAdminBase { } 
Thread.sleep(1000L); } catch (Exception e) { - LOG.error(e); + LOG.error(e.toString(), e); } } assertEquals(count, 2); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestBlockEvictionFromClient.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestBlockEvictionFromClient.java index 9388144db70..7d95a712e6b 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestBlockEvictionFromClient.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestBlockEvictionFromClient.java @@ -34,8 +34,6 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicReference; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.HBaseTestingUtility; @@ -70,11 +68,13 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({ LargeTests.class, ClientTests.class }) @SuppressWarnings("deprecation") public class TestBlockEvictionFromClient { - private static final Log LOG = LogFactory.getLog(TestBlockEvictionFromClient.class); + private static final Logger LOG = LoggerFactory.getLogger(TestBlockEvictionFromClient.class); protected final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); static byte[][] ROWS = new byte[2][]; private static int NO_OF_THREADS = 3; @@ -184,7 +184,7 @@ public class TestBlockEvictionFromClient { // get the block cache and region RegionLocator locator = TEST_UTIL.getConnection().getRegionLocator(tableName); String regionName = locator.getAllRegionLocations().get(0).getRegionInfo().getEncodedName(); - HRegion region = (HRegion) TEST_UTIL.getRSForFirstRegionInTable(tableName) + HRegion region = TEST_UTIL.getRSForFirstRegionInTable(tableName) .getRegion(regionName); HStore store = region.getStores().iterator().next(); CacheConfig cacheConf = store.getCacheConfig(); @@ -275,7 +275,7 @@ public class TestBlockEvictionFromClient { RegionLocator locator = TEST_UTIL.getConnection().getRegionLocator(tableName); String regionName = locator.getAllRegionLocations().get(0).getRegionInfo().getEncodedName(); HRegion region = - (HRegion) TEST_UTIL.getRSForFirstRegionInTable(tableName).getRegion(regionName); + TEST_UTIL.getRSForFirstRegionInTable(tableName).getRegion(regionName); HStore store = region.getStores().iterator().next(); CacheConfig cacheConf = store.getCacheConfig(); cacheConf.setCacheDataOnWrite(true); @@ -334,7 +334,7 @@ public class TestBlockEvictionFromClient { RegionLocator locator = TEST_UTIL.getConnection().getRegionLocator(tableName); String regionName = locator.getAllRegionLocations().get(0).getRegionInfo().getEncodedName(); HRegion region = - (HRegion) TEST_UTIL.getRSForFirstRegionInTable(tableName).getRegion(regionName); + TEST_UTIL.getRSForFirstRegionInTable(tableName).getRegion(regionName); HStore store = region.getStores().iterator().next(); CacheConfig cacheConf = store.getCacheConfig(); cacheConf.setCacheDataOnWrite(true); @@ -396,7 +396,7 @@ public class TestBlockEvictionFromClient { RegionLocator locator = TEST_UTIL.getConnection().getRegionLocator(tableName); String regionName = locator.getAllRegionLocations().get(0).getRegionInfo().getEncodedName(); HRegion region = - (HRegion) 
TEST_UTIL.getRSForFirstRegionInTable(tableName).getRegion(regionName); + TEST_UTIL.getRSForFirstRegionInTable(tableName).getRegion(regionName); BlockCache cache = setCacheProperties(region); Put put = new Put(ROW); put.addColumn(FAMILY, QUALIFIER, data); @@ -489,7 +489,7 @@ public class TestBlockEvictionFromClient { RegionLocator locator = TEST_UTIL.getConnection().getRegionLocator(tableName); String regionName = locator.getAllRegionLocations().get(0).getRegionInfo().getEncodedName(); HRegion region = - (HRegion) TEST_UTIL.getRSForFirstRegionInTable(tableName).getRegion(regionName); + TEST_UTIL.getRSForFirstRegionInTable(tableName).getRegion(regionName); BlockCache cache = setCacheProperties(region); Put put = new Put(ROW); @@ -573,7 +573,7 @@ public class TestBlockEvictionFromClient { RegionLocator locator = TEST_UTIL.getConnection().getRegionLocator(tableName); String regionName = locator.getAllRegionLocations().get(0).getRegionInfo().getEncodedName(); HRegion region = - (HRegion) TEST_UTIL.getRSForFirstRegionInTable(tableName).getRegion(regionName); + TEST_UTIL.getRSForFirstRegionInTable(tableName).getRegion(regionName); HStore store = region.getStores().iterator().next(); CacheConfig cacheConf = store.getCacheConfig(); cacheConf.setEvictOnClose(true); @@ -636,7 +636,7 @@ public class TestBlockEvictionFromClient { RegionLocator locator = TEST_UTIL.getConnection().getRegionLocator(tableName); String regionName = locator.getAllRegionLocations().get(0).getRegionInfo().getEncodedName(); HRegion region = - (HRegion) TEST_UTIL.getRSForFirstRegionInTable(tableName).getRegion(regionName); + TEST_UTIL.getRSForFirstRegionInTable(tableName).getRegion(regionName); HStore store = region.getStores().iterator().next(); CacheConfig cacheConf = store.getCacheConfig(); cacheConf.setCacheDataOnWrite(true); @@ -719,7 +719,7 @@ public class TestBlockEvictionFromClient { RegionLocator locator = TEST_UTIL.getConnection().getRegionLocator(tableName); String regionName = locator.getAllRegionLocations().get(0).getRegionInfo().getEncodedName(); HRegion region = - (HRegion) TEST_UTIL.getRSForFirstRegionInTable(tableName).getRegion(regionName); + TEST_UTIL.getRSForFirstRegionInTable(tableName).getRegion(regionName); BlockCache cache = setCacheProperties(region); Put put = new Put(ROW); @@ -819,7 +819,7 @@ public class TestBlockEvictionFromClient { RegionLocator locator = TEST_UTIL.getConnection().getRegionLocator(tableName); String regionName = locator.getAllRegionLocations().get(0).getRegionInfo().getEncodedName(); HRegion region = - (HRegion) TEST_UTIL.getRSForFirstRegionInTable(tableName).getRegion(regionName); + TEST_UTIL.getRSForFirstRegionInTable(tableName).getRegion(regionName); HStore store = region.getStores().iterator().next(); CacheConfig cacheConf = store.getCacheConfig(); cacheConf.setCacheDataOnWrite(true); @@ -885,7 +885,7 @@ public class TestBlockEvictionFromClient { RegionLocator locator = TEST_UTIL.getConnection().getRegionLocator(tableName); String regionName = locator.getAllRegionLocations().get(0).getRegionInfo().getEncodedName(); HRegion region = - (HRegion) TEST_UTIL.getRSForFirstRegionInTable(tableName).getRegion(regionName); + TEST_UTIL.getRSForFirstRegionInTable(tableName).getRegion(regionName); HStore store = region.getStores().iterator().next(); CacheConfig cacheConf = store.getCacheConfig(); cacheConf.setCacheDataOnWrite(true); @@ -1003,7 +1003,7 @@ public class TestBlockEvictionFromClient { RegionLocator locator = TEST_UTIL.getConnection().getRegionLocator(tableName); String regionName 
= locator.getAllRegionLocations().get(0).getRegionInfo().getEncodedName(); HRegion region = - (HRegion) TEST_UTIL.getRSForFirstRegionInTable(tableName).getRegion(regionName); + TEST_UTIL.getRSForFirstRegionInTable(tableName).getRegion(regionName); HStore store = region.getStores().iterator().next(); CacheConfig cacheConf = store.getCacheConfig(); cacheConf.setCacheDataOnWrite(true); @@ -1133,7 +1133,7 @@ public class TestBlockEvictionFromClient { RegionLocator locator = TEST_UTIL.getConnection().getRegionLocator(tableName); String regionName = locator.getAllRegionLocations().get(0).getRegionInfo().getEncodedName(); HRegion region = - (HRegion) TEST_UTIL.getRSForFirstRegionInTable(tableName).getRegion(regionName); + TEST_UTIL.getRSForFirstRegionInTable(tableName).getRegion(regionName); HStore store = region.getStores().iterator().next(); CacheConfig cacheConf = store.getCacheConfig(); cacheConf.setCacheDataOnWrite(true); @@ -1617,7 +1617,7 @@ public class TestBlockEvictionFromClient { } } } catch (InterruptedException e1) { - LOG.error(e1); + LOG.error(e1.toString(), e1); } } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientOperationInterrupt.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientOperationInterrupt.java index e92ba234533..cbead73d4fe 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientOperationInterrupt.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientOperationInterrupt.java @@ -19,8 +19,6 @@ package org.apache.hadoop.hbase.client; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.HBaseConfiguration; @@ -40,6 +38,8 @@ import org.junit.Assert; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.io.IOException; import java.io.InterruptedIOException; @@ -51,7 +51,7 @@ import java.util.concurrent.atomic.AtomicInteger; @Category({MediumTests.class, ClientTests.class}) public class TestClientOperationInterrupt { - private static final Log LOG = LogFactory.getLog(TestClientOperationInterrupt.class); + private static final Logger LOG = LoggerFactory.getLogger(TestClientOperationInterrupt.class); private static HBaseTestingUtility util; private static final TableName tableName = TableName.valueOf("test"); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientPushback.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientPushback.java index 12c7faeb394..62eb31674f2 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientPushback.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientPushback.java @@ -24,8 +24,6 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.*; import org.apache.hadoop.hbase.client.backoff.ClientBackoffPolicy; @@ -43,6 +41,8 @@ import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static 
org.apache.hadoop.hbase.client.MetricsConnection.CLIENT_SIDE_METRICS_ENABLED_KEY; import static org.junit.Assert.assertEquals; @@ -56,7 +56,7 @@ import static org.junit.Assert.assertTrue; @Category(MediumTests.class) public class TestClientPushback { - private static final Log LOG = LogFactory.getLog(TestClientPushback.class); + private static final Logger LOG = LoggerFactory.getLogger(TestClientPushback.class); private static final HBaseTestingUtility UTIL = new HBaseTestingUtility(); private static final TableName tableName = TableName.valueOf("client-pushback"); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientScannerRPCTimeout.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientScannerRPCTimeout.java index c9686c2e7eb..e225d556d9a 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientScannerRPCTimeout.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientScannerRPCTimeout.java @@ -21,17 +21,11 @@ import static org.junit.Assert.assertTrue; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.commons.logging.impl.Log4JLogger; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.MiniHBaseCluster.MiniHBaseClusterRegionServer; import org.apache.hadoop.hbase.TableName; -import org.apache.hadoop.hbase.CoordinatedStateManager; -import org.apache.hadoop.hbase.ipc.AbstractRpcClient; -import org.apache.hadoop.hbase.ipc.RpcServer; import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequest; import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse; import org.apache.hadoop.hbase.regionserver.HRegionServer; @@ -39,7 +33,6 @@ import org.apache.hadoop.hbase.regionserver.RSRpcServices; import org.apache.hadoop.hbase.testclassification.ClientTests; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.util.Bytes; -import org.apache.log4j.Level; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Rule; @@ -49,6 +42,8 @@ import org.junit.experimental.categories.Category; import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController; import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Test the scenario where a HRegionServer#scan() call, while scanning, timeout at client side and @@ -56,7 +51,7 @@ import org.junit.rules.TestName; */ @Category({MediumTests.class, ClientTests.class}) public class TestClientScannerRPCTimeout { - private static final Log LOG = LogFactory.getLog(TestClientScannerRPCTimeout.class); + private static final Logger LOG = LoggerFactory.getLogger(TestClientScannerRPCTimeout.class); private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private static final byte[] FAMILY = Bytes.toBytes("testFamily"); private static final byte[] QUALIFIER = Bytes.toBytes("testQualifier"); @@ -69,9 +64,6 @@ public class TestClientScannerRPCTimeout { @BeforeClass public static void setUpBeforeClass() throws Exception { - ((Log4JLogger)RpcServer.LOG).getLogger().setLevel(Level.ALL); - ((Log4JLogger)AbstractRpcClient.LOG).getLogger().setLevel(Level.ALL); - ((Log4JLogger)ScannerCallable.LOG).getLogger().setLevel(Level.ALL); Configuration conf = 
TEST_UTIL.getConfiguration(); // Don't report so often so easier to see other rpcs conf.setInt("hbase.regionserver.msginterval", 3 * 10000); @@ -146,6 +138,7 @@ public class TestClientScannerRPCTimeout { super(conf); } + @Override protected RSRpcServices createRpcServices() throws IOException { return new RSRpcServicesWithScanTimeout(this); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestCloneSnapshotFromClient.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestCloneSnapshotFromClient.java index f191bea06cb..96a123b65c1 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestCloneSnapshotFromClient.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestCloneSnapshotFromClient.java @@ -19,8 +19,6 @@ package org.apache.hadoop.hbase.client; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.NamespaceDescriptor; @@ -40,13 +38,15 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Test clone snapshots from the client */ @Category({LargeTests.class, ClientTests.class}) public class TestCloneSnapshotFromClient { - private static final Log LOG = LogFactory.getLog(TestCloneSnapshotFromClient.class); + private static final Logger LOG = LoggerFactory.getLogger(TestCloneSnapshotFromClient.class); protected final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestDropTimeoutRequest.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestDropTimeoutRequest.java index 46aa72fc979..141b13f1751 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestDropTimeoutRequest.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestDropTimeoutRequest.java @@ -17,8 +17,6 @@ */ package org.apache.hadoop.hbase.client; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.CategoryBasedTimeout; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.HBaseTestingUtility; @@ -39,6 +37,8 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; import org.junit.rules.TestRule; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.io.IOException; import java.util.List; @@ -60,7 +60,7 @@ public class TestDropTimeoutRequest { @Rule public TestName name = new TestName(); - private static final Log LOG = LogFactory.getLog(TestDropTimeoutRequest.class); + private static final Logger LOG = LoggerFactory.getLogger(TestDropTimeoutRequest.class); private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private static final byte[] FAM_NAM = Bytes.toBytes("f"); private static final int RPC_RETRY = 5; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestEnableTable.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestEnableTable.java index 6b0359439df..ca4163ece5e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestEnableTable.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestEnableTable.java @@ -24,8 +24,6 @@ import java.util.List; import 
java.util.Optional; import java.util.concurrent.CountDownLatch; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HConstants; @@ -53,6 +51,8 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; @@ -61,7 +61,7 @@ import static org.junit.Assert.fail; @Category({ MasterTests.class, MediumTests.class }) public class TestEnableTable { private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); - private static final Log LOG = LogFactory.getLog(TestEnableTable.class); + private static final Logger LOG = LoggerFactory.getLogger(TestEnableTable.class); private static final byte[] FAMILYNAME = Bytes.toBytes("fam"); @Rule diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFastFail.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFastFail.java index 154f6eb700a..47516ec7581 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFastFail.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFastFail.java @@ -32,8 +32,6 @@ import java.util.concurrent.Future; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseTestingUtility; @@ -58,10 +56,12 @@ import org.junit.Test; import org.junit.Ignore; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({MediumTests.class, ClientTests.class}) public class TestFastFail { - private static final Log LOG = LogFactory.getLog(TestFastFail.class); + private static final Logger LOG = LoggerFactory.getLogger(TestFastFail.class); private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private static byte[] FAMILY = Bytes.toBytes("testFamily"); private static final Random random = new Random(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java index f4ed71c96ce..c2a7f6b1ad5 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java @@ -46,8 +46,6 @@ import java.util.concurrent.Executors; import java.util.concurrent.atomic.AtomicReference; import org.apache.commons.lang3.ArrayUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellScanner; @@ -111,6 +109,8 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Run tests that use the HBase clients; {@link Table}. 
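For reference, the recurring change running through these client tests is the switch from commons-logging to SLF4J. Commons-logging exposes overloads such as error(Object) and warn(Object), so a bare LOG.error(e) compiled; the SLF4J Logger API takes a String message first, which is why such calls become LOG.error(e.toString(), e), keeping the stack trace as the Throwable argument. A minimal sketch of the before/after pattern (the class name and try block here are illustrative, not taken from the patch):

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class ExampleClientTest {
  // Before: private static final Log LOG = LogFactory.getLog(ExampleClientTest.class);
  private static final Logger LOG = LoggerFactory.getLogger(ExampleClientTest.class);

  void waitQuietly() {
    try {
      Thread.sleep(1000L);
    } catch (InterruptedException e) {
      // Before: LOG.error(e);  -- relied on the commons-logging error(Object) overload
      LOG.error(e.toString(), e); // SLF4J: String message plus Throwable preserves the stack trace
    }
  }
}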
@@ -121,7 +121,7 @@ import org.junit.rules.TestName; @SuppressWarnings ("deprecation") public class TestFromClientSide { // NOTE: Increment tests were moved to their own class, TestIncrementsFromClientSide. - private static final Log LOG = LogFactory.getLog(TestFromClientSide.class); + private static final Logger LOG = LoggerFactory.getLogger(TestFromClientSide.class); protected final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private static byte [] ROW = Bytes.toBytes("testRow"); private static byte [] FAMILY = Bytes.toBytes("testFamily"); @@ -1160,7 +1160,7 @@ public class TestFromClientSide { // Null family (should NOT work) try { - TEST_UTIL.createTable(tableName, new byte[][]{(byte[])null}); + TEST_UTIL.createTable(tableName, new byte[][]{null}); fail("Creating a table with a null family passed, should fail"); } catch(Exception e) {} @@ -4766,7 +4766,7 @@ public class TestFromClientSide { // the error happens in a thread, it won't fail the test, // need to pass it to the caller for proper handling. error.set(e); - LOG.error(e); + LOG.error(e.toString(), e); } return null; @@ -5163,7 +5163,7 @@ public class TestFromClientSide { // get the block cache and region String regionName = locator.getAllRegionLocations().get(0).getRegionInfo().getEncodedName(); - HRegion region = (HRegion) TEST_UTIL.getRSForFirstRegionInTable(tableName) + HRegion region = TEST_UTIL.getRSForFirstRegionInTable(tableName) .getRegion(regionName); HStore store = region.getStores().iterator().next(); CacheConfig cacheConf = store.getCacheConfig(); @@ -5516,18 +5516,18 @@ public class TestFromClientSide { // put the same row 4 times, with different values Put p = new Put(row); - p.addColumn(FAMILY, QUALIFIER, (long) 10, VALUE); + p.addColumn(FAMILY, QUALIFIER, 10, VALUE); table.put(p); p = new Put(row); - p.addColumn(FAMILY, QUALIFIER, (long) 11, ArrayUtils.add(VALUE, (byte) 2)); + p.addColumn(FAMILY, QUALIFIER, 11, ArrayUtils.add(VALUE, (byte) 2)); table.put(p); p = new Put(row); - p.addColumn(FAMILY, QUALIFIER, (long) 12, ArrayUtils.add(VALUE, (byte) 3)); + p.addColumn(FAMILY, QUALIFIER, 12, ArrayUtils.add(VALUE, (byte) 3)); table.put(p); p = new Put(row); - p.addColumn(FAMILY, QUALIFIER, (long) 13, ArrayUtils.add(VALUE, (byte) 4)); + p.addColumn(FAMILY, QUALIFIER, 13, ArrayUtils.add(VALUE, (byte) 4)); table.put(p); int versions = 4; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide3.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide3.java index e5d5324c998..2d67b3e359d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide3.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide3.java @@ -31,8 +31,6 @@ import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; @@ -74,10 +72,12 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({LargeTests.class, ClientTests.class}) public class TestFromClientSide3 { - private static final Log LOG = LogFactory.getLog(TestFromClientSide3.class); + private static final Logger LOG = 
LoggerFactory.getLogger(TestFromClientSide3.class); private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private static byte[] FAMILY = Bytes.toBytes("testFamily"); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHBaseAdminNoCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHBaseAdminNoCluster.java index fb9fb37a901..712dc55bf30 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHBaseAdminNoCluster.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHBaseAdminNoCluster.java @@ -24,8 +24,6 @@ import static org.mockito.Mockito.when; import java.io.IOException; import java.util.ArrayList; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseTestingUtility; @@ -59,13 +57,15 @@ import org.mockito.Matchers; import org.mockito.Mockito; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import com.google.protobuf.ServiceException; @Category({SmallTests.class, ClientTests.class}) public class TestHBaseAdminNoCluster { - private static final Log LOG = LogFactory.getLog(TestHBaseAdminNoCluster.class); + private static final Logger LOG = LoggerFactory.getLogger(TestHBaseAdminNoCluster.class); @Rule public TestName name = new TestName(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHCM.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHCM.java index 7e886920ab3..104b49949c0 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHCM.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHCM.java @@ -41,8 +41,6 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicReference; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.CategoryBasedTimeout; import org.apache.hadoop.hbase.Cell; @@ -87,6 +85,8 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; import org.junit.rules.TestRule; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; /** @@ -98,7 +98,7 @@ public class TestHCM { .withTimeout(this.getClass()) .withLookingForStuckThread(true) .build(); - private static final Log LOG = LogFactory.getLog(TestHCM.class); + private static final Logger LOG = LoggerFactory.getLogger(TestHCM.class); private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private static final TableName TABLE_NAME = TableName.valueOf("test"); @@ -747,7 +747,7 @@ public class TestHCM { } } catch (Throwable t) { failed.set(t); - LOG.error(t); + LOG.error(t.toString(), t); } step.set(3); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHTableMultiplexer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHTableMultiplexer.java index 5c47de0c46f..f59a0e95044 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHTableMultiplexer.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHTableMultiplexer.java @@ -25,8 +25,6 @@ import static 
org.junit.Assert.assertTrue; import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.testclassification.ClientTests; @@ -38,10 +36,12 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({LargeTests.class, ClientTests.class}) public class TestHTableMultiplexer { - private static final Log LOG = LogFactory.getLog(TestHTableMultiplexer.class); + private static final Logger LOG = LoggerFactory.getLogger(TestHTableMultiplexer.class); private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private static byte[] FAMILY = Bytes.toBytes("testFamily"); private static byte[] QUALIFIER = Bytes.toBytes("testQualifier"); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHTableMultiplexerFlushCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHTableMultiplexerFlushCache.java index 99571cc83a8..657dfd8ab76 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHTableMultiplexerFlushCache.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHTableMultiplexerFlushCache.java @@ -19,8 +19,6 @@ */ package org.apache.hadoop.hbase.client; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HRegionLocation; import org.apache.hadoop.hbase.MiniHBaseCluster; @@ -38,13 +36,15 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; @Category({ LargeTests.class, ClientTests.class }) public class TestHTableMultiplexerFlushCache { - private static final Log LOG = LogFactory.getLog(TestHTableMultiplexerFlushCache.class); + private static final Logger LOG = LoggerFactory.getLogger(TestHTableMultiplexerFlushCache.class); private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private static byte[] FAMILY = Bytes.toBytes("testFamily"); private static byte[] QUALIFIER1 = Bytes.toBytes("testQualifier_1"); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestIllegalTableDescriptor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestIllegalTableDescriptor.java index 9d88dae7182..df39e0988aa 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestIllegalTableDescriptor.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestIllegalTableDescriptor.java @@ -18,21 +18,16 @@ */ package org.apache.hadoop.hbase.client; -import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertSame; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; +import static org.mockito.ArgumentMatchers.contains; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; import java.io.IOException; -import 
java.util.ArrayList; -import java.util.List; +import java.lang.reflect.Field; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HColumnDescriptor; @@ -42,21 +37,19 @@ import org.apache.hadoop.hbase.master.HMaster; import org.apache.hadoop.hbase.testclassification.ClientTests; import org.apache.hadoop.hbase.testclassification.LargeTests; import org.apache.hadoop.hbase.util.Bytes; -import org.apache.log4j.AppenderSkeleton; -import org.apache.log4j.Level; -import org.apache.log4j.Logger; -import org.apache.log4j.spi.LoggingEvent; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; @Category({LargeTests.class, ClientTests.class}) public class TestIllegalTableDescriptor { // NOTE: Increment tests were moved to their own class, TestIncrementsFromClientSide. - private static final Log LOG = LogFactory.getLog(TestFromClientSide.class); + private static final Logger masterLogger; + protected final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private static byte [] FAMILY = Bytes.toBytes("testFamily"); @@ -64,8 +57,17 @@ public class TestIllegalTableDescriptor { @Rule public TestName name = new TestName(); + static { + masterLogger = mock(Logger.class); + } + @BeforeClass public static void setUpBeforeClass() throws Exception { + // replacing HMaster.LOG with our mock logger for verifying logging + Field field = HMaster.class.getDeclaredField("LOG"); + field.setAccessible(true); + field.set(null, masterLogger); + Configuration conf = TEST_UTIL.getConfiguration(); conf.setBoolean("hbase.table.sanity.checks", true); // enable for below tests // We need more than one region server in this test @@ -165,20 +167,12 @@ public class TestIllegalTableDescriptor { htd.setMemStoreFlushSize(0); // Check that logs warn on invalid table but allow it. 
- ListAppender listAppender = new ListAppender(); - Logger log = Logger.getLogger(HMaster.class); - log.addAppender(listAppender); - log.setLevel(Level.WARN); - htd.setConfiguration("hbase.table.sanity.checks", Boolean.FALSE.toString()); checkTableIsLegal(htd); - assertFalse(listAppender.getMessages().isEmpty()); - assertTrue(listAppender.getMessages().get(0).startsWith("MEMSTORE_FLUSHSIZE for table " + verify(masterLogger).warn(contains("MEMSTORE_FLUSHSIZE for table " + "descriptor or \"hbase.hregion.memstore.flush.size\" (0) is too small, which might " + "cause very frequent flushing.")); - - log.removeAppender(listAppender); } private void checkTableIsLegal(HTableDescriptor htd) throws IOException { @@ -198,26 +192,4 @@ public class TestIllegalTableDescriptor { } assertFalse(admin.tableExists(htd.getTableName())); } - - private static class ListAppender extends AppenderSkeleton { - private final List messages = new ArrayList<>(); - - @Override - protected void append(LoggingEvent event) { - messages.add(event.getMessage().toString()); - } - - @Override - public void close() { - } - - @Override - public boolean requiresLayout() { - return false; - } - - public List getMessages() { - return messages; - } - } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestIncrementsFromClientSide.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestIncrementsFromClientSide.java index 14f5d6752c6..d69d01eeb76 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestIncrementsFromClientSide.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestIncrementsFromClientSide.java @@ -30,8 +30,6 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; @@ -51,6 +49,8 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Run Increment tests that use the HBase clients; {@link HTable}. 
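The TestIllegalTableDescriptor change above replaces the Log4J ListAppender with a Mockito mock that is installed into the master's static LOG field by reflection and then checked with an argument matcher. A condensed sketch of that verification style follows; the target class is passed in as a parameter to keep the sketch self-contained, and only the field name "LOG" and the message fragment come from the patch:

import static org.mockito.ArgumentMatchers.contains;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;

import java.lang.reflect.Field;
import org.slf4j.Logger;

public class MockLoggerSketch {
  private static final Logger masterLogger = mock(Logger.class);

  // Swap the class's static LOG for the mock
  // (assumes the target field is non-final and not a compile-time constant).
  static void installMockLogger(Class<?> target) throws Exception {
    Field field = target.getDeclaredField("LOG");
    field.setAccessible(true);
    field.set(null, masterLogger);
  }

  static void assertWarnedAboutFlushSize() {
    verify(masterLogger).warn(contains("MEMSTORE_FLUSHSIZE for table"));
  }
}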
@@ -63,7 +63,7 @@ import org.junit.rules.TestName; */ @Category(LargeTests.class) public class TestIncrementsFromClientSide { - final Log LOG = LogFactory.getLog(getClass()); + final Logger LOG = LoggerFactory.getLogger(getClass()); protected final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private static byte [] ROW = Bytes.toBytes("testRow"); private static byte [] FAMILY = Bytes.toBytes("testFamily"); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestLeaseRenewal.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestLeaseRenewal.java index 87d8a6e9acc..661d3f157c8 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestLeaseRenewal.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestLeaseRenewal.java @@ -24,8 +24,6 @@ import static org.junit.Assert.assertTrue; import java.util.Arrays; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.CompatibilityFactory; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; @@ -43,12 +41,14 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category(LargeTests.class) public class TestLeaseRenewal { public MetricsAssertHelper HELPER = CompatibilityFactory.getInstance(MetricsAssertHelper.class); - final Log LOG = LogFactory.getLog(getClass()); + final Logger LOG = LoggerFactory.getLogger(getClass()); private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private static byte[] FAMILY = Bytes.toBytes("testFamily"); private static final byte[] ANOTHERROW = Bytes.toBytes("anotherrow"); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMetaWithReplicas.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMetaWithReplicas.java index 5660a017ca7..62a42ce4472 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMetaWithReplicas.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMetaWithReplicas.java @@ -30,8 +30,6 @@ import java.util.EnumSet; import java.util.List; import java.util.concurrent.ExecutorService; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Abortable; import org.apache.hadoop.hbase.CategoryBasedTimeout; @@ -67,6 +65,8 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; import org.junit.rules.TestRule; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import edu.umd.cs.findbugs.annotations.Nullable; @@ -79,7 +79,7 @@ public class TestMetaWithReplicas { withTimeout(this.getClass()). withLookingForStuckThread(true). 
build(); - private static final Log LOG = LogFactory.getLog(TestMetaWithReplicas.class); + private static final Logger LOG = LoggerFactory.getLogger(TestMetaWithReplicas.class); private final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); @Rule diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMobSnapshotCloneIndependence.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMobSnapshotCloneIndependence.java index 3bd8f2e2383..e22391ef3e8 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMobSnapshotCloneIndependence.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMobSnapshotCloneIndependence.java @@ -19,8 +19,6 @@ package org.apache.hadoop.hbase.client; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.CategoryBasedTimeout; import org.apache.hadoop.hbase.TableName; @@ -32,13 +30,15 @@ import org.junit.BeforeClass; import org.junit.ClassRule; import org.junit.experimental.categories.Category; import org.junit.rules.TestRule; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Test to verify that the cloned table is independent of the table from which it was cloned */ @Category(LargeTests.class) public class TestMobSnapshotCloneIndependence extends TestSnapshotCloneIndependence { - private static final Log LOG = LogFactory.getLog(TestMobSnapshotCloneIndependence.class); + private static final Logger LOG = LoggerFactory.getLogger(TestMobSnapshotCloneIndependence.class); @ClassRule public static final TestRule timeout = diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMobSnapshotFromClient.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMobSnapshotFromClient.java index 268bc14ea99..827c932f58a 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMobSnapshotFromClient.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMobSnapshotFromClient.java @@ -17,8 +17,6 @@ */ package org.apache.hadoop.hbase.client; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.mob.MobConstants; import org.apache.hadoop.hbase.snapshot.MobSnapshotTestingUtils; @@ -26,6 +24,8 @@ import org.apache.hadoop.hbase.testclassification.ClientTests; import org.apache.hadoop.hbase.testclassification.LargeTests; import org.junit.BeforeClass; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** @@ -35,7 +35,7 @@ import org.junit.experimental.categories.Category; */ @Category({LargeTests.class, ClientTests.class}) public class TestMobSnapshotFromClient extends TestSnapshotFromClient { - private static final Log LOG = LogFactory.getLog(TestMobSnapshotFromClient.class); + private static final Logger LOG = LoggerFactory.getLogger(TestMobSnapshotFromClient.class); /** * Setup the config for the cluster diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java index cfa7f373584..14ab6c903bf 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java @@ -30,8 +30,6 @@ import java.util.List; import 
java.util.concurrent.CountDownLatch; import java.util.concurrent.ThreadPoolExecutor; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HBaseTestingUtility; @@ -53,10 +51,12 @@ import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({MediumTests.class, FlakeyTests.class}) public class TestMultiParallel { - private static final Log LOG = LogFactory.getLog(TestMultiParallel.class); + private static final Logger LOG = LoggerFactory.getLogger(TestMultiParallel.class); private static final HBaseTestingUtility UTIL = new HBaseTestingUtility(); private static final byte[] VALUE = Bytes.toBytes("value"); @@ -111,7 +111,7 @@ public class TestMultiParallel { // Don't use integer as a multiple, so that we have a number of keys that is // not a multiple of the number of regions - int numKeys = (int) ((float) starterKeys.length * 10.33F); + int numKeys = (int) (starterKeys.length * 10.33F); List keys = new ArrayList<>(); for (int i = 0; i < numKeys; i++) { @@ -232,7 +232,7 @@ public class TestMultiParallel { table.batch(actions, r); fail(); } catch (RetriesExhaustedWithDetailsException ex) { - LOG.debug(ex); + LOG.debug(ex.toString(), ex); // good! assertFalse(ex.mayHaveClusterIssues()); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultipleTimestamps.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultipleTimestamps.java index 807d59afe66..ee39a83c37f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultipleTimestamps.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultipleTimestamps.java @@ -25,8 +25,6 @@ import java.util.Arrays; import java.util.Collections; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.*; import org.apache.hadoop.hbase.testclassification.ClientTests; import org.apache.hadoop.hbase.testclassification.LargeTests; @@ -39,6 +37,8 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Run tests related to {@link org.apache.hadoop.hbase.filter.TimestampsFilter} using HBase client APIs. 
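On the same theme, SLF4J's debug/warn/error methods expect a String rather than an arbitrary Object, which is why the patch converts non-String arguments explicitly, e.g. LOG.debug(Objects.toString(settings)) in TestAsyncQuotaAdminApi and LOG.debug(ex.toString(), ex) in TestMultiParallel above. The parameterized form is the other common SLF4J idiom; the snippet below is an illustrative sketch, not code from the patch:

import java.util.Objects;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

class DebugLoggingSketch {
  private static final Logger LOG = LoggerFactory.getLogger(DebugLoggingSketch.class);

  void logSettings(Object settings) {
    // Explicit conversion, as used in the patch:
    LOG.debug(Objects.toString(settings));
    // Parameterized alternative; the argument is only rendered when DEBUG is enabled:
    LOG.debug("Quota settings: {}", settings);
  }
}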
@@ -47,7 +47,7 @@ import org.junit.rules.TestName; */ @Category({LargeTests.class, ClientTests.class}) public class TestMultipleTimestamps { - private static final Log LOG = LogFactory.getLog(TestMultipleTimestamps.class); + private static final Logger LOG = LoggerFactory.getLogger(TestMultipleTimestamps.class); private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); @Rule diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicaWithCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicaWithCluster.java index 22d0e8e1de5..ba4a8c29770 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicaWithCluster.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicaWithCluster.java @@ -30,8 +30,6 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicReference; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; @@ -66,10 +64,12 @@ import org.junit.Assert; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({MediumTests.class, ClientTests.class}) public class TestReplicaWithCluster { - private static final Log LOG = LogFactory.getLog(TestReplicaWithCluster.class); + private static final Logger LOG = LoggerFactory.getLogger(TestReplicaWithCluster.class); private static final int NB_SERVERS = 3; private static final byte[] row = TestReplicaWithCluster.class.getName().getBytes(); @@ -115,7 +115,7 @@ public class TestReplicaWithCluster { } } } catch (InterruptedException e1) { - LOG.error(e1); + LOG.error(e1.toString(), e1); } } else { LOG.info("We're not the primary replicas."); @@ -555,7 +555,7 @@ public class TestReplicaWithCluster { try { Thread.sleep(2 * REFRESH_PERIOD); } catch (InterruptedException e1) { - LOG.error(e1); + LOG.error(e1.toString(), e1); } // But if we ask for stale we will get it @@ -590,7 +590,7 @@ public class TestReplicaWithCluster { try { Thread.sleep(2 * REFRESH_PERIOD); } catch (InterruptedException e1) { - LOG.error(e1); + LOG.error(e1.toString(), e1); } // But if we ask for stale we will get it @@ -636,7 +636,7 @@ public class TestReplicaWithCluster { try { Thread.sleep(2 * REFRESH_PERIOD); } catch (InterruptedException e1) { - LOG.error(e1); + LOG.error(e1.toString(), e1); } try { @@ -768,7 +768,7 @@ public class TestReplicaWithCluster { try { Thread.sleep(2 * REFRESH_PERIOD); } catch (InterruptedException e1) { - LOG.error(e1); + LOG.error(e1.toString(), e1); } // Simulating the RS down diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicasClient.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicasClient.java index a34b651352c..a06055ded29 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicasClient.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicasClient.java @@ -35,9 +35,7 @@ import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicReference; import com.codahale.metrics.Counter; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.commons.logging.impl.Log4JLogger; + import org.apache.hadoop.conf.Configuration; 
import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.HBaseTestingUtility; @@ -62,7 +60,6 @@ import org.apache.hadoop.hbase.regionserver.TestRegionServerNoMaster; import org.apache.hadoop.hbase.testclassification.ClientTests; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.util.Bytes; -import org.apache.log4j.Level; import org.apache.zookeeper.KeeperException; import org.junit.After; import org.junit.AfterClass; @@ -71,6 +68,8 @@ import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Tests for region replicas. Sad that we cannot isolate these without bringing up a whole @@ -79,11 +78,7 @@ import org.junit.experimental.categories.Category; @Category({MediumTests.class, ClientTests.class}) @SuppressWarnings("deprecation") public class TestReplicasClient { - private static final Log LOG = LogFactory.getLog(TestReplicasClient.class); - - static { - ((Log4JLogger)RpcRetryingCallerImpl.LOG).getLogger().setLevel(Level.ALL); - } + private static final Logger LOG = LoggerFactory.getLogger(TestReplicasClient.class); private static final int NB_SERVERS = 1; private static Table table = null; @@ -161,7 +156,7 @@ public class TestReplicasClient { } } } catch (InterruptedException e1) { - LOG.error(e1); + LOG.error(e1.toString(), e1); } } else { LOG.info("We're not the primary replicas."); @@ -175,7 +170,7 @@ public class TestReplicasClient { } } } catch (InterruptedException e1) { - LOG.error(e1); + LOG.error(e1.toString(), e1); } } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestResult.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestResult.java index e87602e591e..33352985a6b 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestResult.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestResult.java @@ -29,8 +29,6 @@ import java.util.NoSuchElementException; import junit.framework.TestCase; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.CellScanner; @@ -40,11 +38,13 @@ import org.apache.hadoop.hbase.testclassification.ClientTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({SmallTests.class, ClientTests.class}) public class TestResult extends TestCase { - private static final Log LOG = LogFactory.getLog(TestResult.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(TestResult.class.getName()); static KeyValue[] genKVs(final byte[] row, final byte[] family, final byte[] value, diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannerTimeout.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannerTimeout.java index 14214d5d2c4..d2191e18e8a 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannerTimeout.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannerTimeout.java @@ -21,8 +21,6 @@ package org.apache.hadoop.hbase.client; import static org.junit.Assert.assertEquals; import static org.junit.Assert.fail; -import org.apache.commons.logging.Log; -import 
org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; @@ -37,6 +35,8 @@ import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Test various scanner timeout issues. @@ -47,7 +47,7 @@ public class TestScannerTimeout { private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); - private static final Log LOG = LogFactory.getLog(TestScannerTimeout.class); + private static final Logger LOG = LoggerFactory.getLogger(TestScannerTimeout.class); private final static byte[] SOME_BYTES = Bytes.toBytes("f"); private final static TableName TABLE_NAME = TableName.valueOf("t"); private final static int NB_ROWS = 10; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannersFromClientSide.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannersFromClientSide.java index 43be5731d03..f0060e4d888 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannersFromClientSide.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannersFromClientSide.java @@ -29,8 +29,6 @@ import java.util.concurrent.TimeUnit; import java.util.function.Consumer; import java.util.stream.IntStream; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.HBaseTestingUtility; @@ -57,13 +55,15 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A client-side test, mostly testing scanners with various parameters. 
*/ @Category({MediumTests.class, ClientTests.class}) public class TestScannersFromClientSide { - private static final Log LOG = LogFactory.getLog(TestScannersFromClientSide.class); + private static final Logger LOG = LoggerFactory.getLogger(TestScannersFromClientSide.class); private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private static byte [] ROW = Bytes.toBytes("testRow"); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSizeFailures.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSizeFailures.java index 4ef354998e4..75a3b8e6bd6 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSizeFailures.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSizeFailures.java @@ -25,8 +25,6 @@ import java.util.LinkedList; import java.util.List; import java.util.Map.Entry; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HColumnDescriptor; @@ -38,12 +36,13 @@ import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps; @Category(LargeTests.class) public class TestSizeFailures { - private static final Log LOG = LogFactory.getLog(TestSizeFailures.class); + private static final Logger LOG = LoggerFactory.getLogger(TestSizeFailures.class); protected final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private static byte [] FAMILY = Bytes.toBytes("testFamily"); protected static int SLAVES = 1; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSmallReversedScanner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSmallReversedScanner.java index fd9f614151b..b050397f573 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSmallReversedScanner.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSmallReversedScanner.java @@ -17,8 +17,6 @@ */ package org.apache.hadoop.hbase.client; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.testclassification.MediumTests; @@ -29,12 +27,14 @@ import org.junit.Assert; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.io.IOException; @Category(MediumTests.class) public class TestSmallReversedScanner { - public static final Log LOG = LogFactory.getLog(TestSmallReversedScanner.class); + public static final Logger LOG = LoggerFactory.getLogger(TestSmallReversedScanner.class); private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private static final TableName TABLE_NAME = TableName.valueOf("testReversedSmall"); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotCloneIndependence.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotCloneIndependence.java index 5688617fecb..da422f3f5d5 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotCloneIndependence.java +++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotCloneIndependence.java @@ -21,8 +21,6 @@ package org.apache.hadoop.hbase.client; import java.util.List; import java.util.regex.Pattern; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -51,13 +49,15 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; import org.junit.rules.TestRule; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Test to verify that the cloned table is independent of the table from which it was cloned */ @Category({LargeTests.class, ClientTests.class}) public class TestSnapshotCloneIndependence { - private static final Log LOG = LogFactory.getLog(TestSnapshotCloneIndependence.class); + private static final Logger LOG = LoggerFactory.getLogger(TestSnapshotCloneIndependence.class); @ClassRule public static final TestRule timeout = diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromClient.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromClient.java index baba1954044..0e6f2e97783 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromClient.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromClient.java @@ -26,8 +26,6 @@ import java.util.ArrayList; import java.util.List; import java.util.regex.Pattern; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -56,6 +54,8 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Test create/using/deleting snapshots from the client @@ -64,7 +64,7 @@ import org.junit.rules.TestName; */ @Category({LargeTests.class, ClientTests.class}) public class TestSnapshotFromClient { - private static final Log LOG = LogFactory.getLog(TestSnapshotFromClient.class); + private static final Logger LOG = LoggerFactory.getLogger(TestSnapshotFromClient.class); protected static final HBaseTestingUtility UTIL = new HBaseTestingUtility(); protected static final int NUM_RS = 2; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotMetadata.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotMetadata.java index 99c4340a147..49c656067bf 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotMetadata.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotMetadata.java @@ -25,8 +25,6 @@ import java.util.ArrayList; import java.util.Collections; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -49,13 +47,15 @@ import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Test class to verify that metadata is consistent before and after a snapshot attempt. 
*/ @Category({MediumTests.class, ClientTests.class}) public class TestSnapshotMetadata { - private static final Log LOG = LogFactory.getLog(TestSnapshotMetadata.class); + private static final Logger LOG = LoggerFactory.getLogger(TestSnapshotMetadata.class); private static final HBaseTestingUtility UTIL = new HBaseTestingUtility(); private static final int NUM_RS = 2; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTableFavoredNodes.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTableFavoredNodes.java index ba07755c13b..cd828bffd1f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTableFavoredNodes.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTableFavoredNodes.java @@ -32,8 +32,6 @@ import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; @@ -64,7 +62,8 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps; @@ -72,7 +71,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps; @Category({ClientTests.class, MediumTests.class}) public class TestTableFavoredNodes { - private static final Log LOG = LogFactory.getLog(TestTableFavoredNodes.class); + private static final Logger LOG = LoggerFactory.getLogger(TestTableFavoredNodes.class); private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private final static int WAIT_TIMEOUT = 60000; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTableSnapshotScanner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTableSnapshotScanner.java index 535a34d7037..56a0792baf9 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTableSnapshotScanner.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTableSnapshotScanner.java @@ -22,8 +22,6 @@ import java.io.IOException; import java.util.Arrays; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -42,11 +40,13 @@ import org.junit.After; import org.junit.Assert; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({LargeTests.class, ClientTests.class}) public class TestTableSnapshotScanner { - private static final Log LOG = LogFactory.getLog(TestTableSnapshotScanner.class); + private static final Logger LOG = LoggerFactory.getLogger(TestTableSnapshotScanner.class); private final HBaseTestingUtility UTIL = new HBaseTestingUtility(); private static final int NUM_REGION_SERVERS = 2; private static final byte[][] FAMILIES = {Bytes.toBytes("f1"), Bytes.toBytes("f2")}; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTimestampsFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTimestampsFilter.java index 0a1fafe398c..89af5de61c0 100644 --- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTimestampsFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTimestampsFilter.java @@ -25,8 +25,6 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.*; import org.apache.hadoop.hbase.filter.Filter; import org.apache.hadoop.hbase.filter.TimestampsFilter; @@ -41,6 +39,8 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Run tests related to {@link TimestampsFilter} using HBase client APIs. @@ -49,7 +49,7 @@ import org.junit.rules.TestName; */ @Category({MediumTests.class, ClientTests.class}) public class TestTimestampsFilter { - private static final Log LOG = LogFactory.getLog(TestTimestampsFilter.class); + private static final Logger LOG = LoggerFactory.getLogger(TestTimestampsFilter.class); private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); @Rule diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestUpdateConfiguration.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestUpdateConfiguration.java index d54cb53a4b6..6511a42ce37 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestUpdateConfiguration.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestUpdateConfiguration.java @@ -26,8 +26,6 @@ import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.StandardCopyOption; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.ServerName; @@ -35,10 +33,12 @@ import org.apache.hadoop.hbase.testclassification.MediumTests; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({MediumTests.class}) public class TestUpdateConfiguration { - private static final Log LOG = LogFactory.getLog(TestUpdateConfiguration.class); + private static final Logger LOG = LoggerFactory.getLogger(TestUpdateConfiguration.class); private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); @BeforeClass diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/locking/TestEntityLocks.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/locking/TestEntityLocks.java index 4ac3654203f..306e1dc73f5 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/locking/TestEntityLocks.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/locking/TestEntityLocks.java @@ -33,8 +33,6 @@ import static org.mockito.Mockito.when; import java.util.Random; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Abortable; import org.apache.hadoop.hbase.HBaseConfiguration; @@ -55,10 +53,12 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.mockito.ArgumentCaptor; import org.mockito.Mockito; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({ClientTests.class, SmallTests.class}) public class 
TestEntityLocks { - private static final Log LOG = LogFactory.getLog(TestEntityLocks.class); + private static final Logger LOG = LoggerFactory.getLogger(TestEntityLocks.class); private final Configuration conf = HBaseConfiguration.create(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/replication/TestReplicationAdmin.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/replication/TestReplicationAdmin.java index 83a2e12dd58..67c635b9f7d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/replication/TestReplicationAdmin.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/replication/TestReplicationAdmin.java @@ -27,8 +27,6 @@ import java.util.Map; import java.util.Set; import java.util.regex.Pattern; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; @@ -50,6 +48,8 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; @@ -64,8 +64,8 @@ import static org.junit.Assert.fail; @Category({MediumTests.class, ClientTests.class}) public class TestReplicationAdmin { - private static final Log LOG = - LogFactory.getLog(TestReplicationAdmin.class); + private static final Logger LOG = + LoggerFactory.getLogger(TestReplicationAdmin.class); private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/codec/CodecPerformance.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/codec/CodecPerformance.java index c035f29a46d..bba27fe6c1f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/codec/CodecPerformance.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/codec/CodecPerformance.java @@ -24,9 +24,9 @@ import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellScanner; import org.apache.hadoop.hbase.KeyValue; @@ -44,7 +44,7 @@ import org.apache.hadoop.hbase.util.Bytes; public class CodecPerformance { /** @deprecated LOG variable would be made private. 
since 1.2, remove in 3.0 */ @Deprecated - public static final Log LOG = LogFactory.getLog(CodecPerformance.class); + public static final Logger LOG = LoggerFactory.getLogger(CodecPerformance.class); static Cell [] getCells(final int howMany) { Cell [] cells = new Cell[howMany]; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/codec/TestCellMessageCodec.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/codec/TestCellMessageCodec.java index 4a42b68768e..24aa1923419 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/codec/TestCellMessageCodec.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/codec/TestCellMessageCodec.java @@ -27,8 +27,6 @@ import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.KeyValue; @@ -37,13 +35,14 @@ import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; import org.junit.Test; import org.junit.experimental.categories.Category; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.io.CountingInputStream; import org.apache.hadoop.hbase.shaded.com.google.common.io.CountingOutputStream; @Category({MiscTests.class, SmallTests.class}) public class TestCellMessageCodec { - private static final Log LOG = LogFactory.getLog(TestCellMessageCodec.class); + private static final Logger LOG = LoggerFactory.getLogger(TestCellMessageCodec.class); @Test public void testEmptyWorks() throws IOException { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/conf/TestConfigurationManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/conf/TestConfigurationManager.java index ab4ebc58b46..1f8dbc40fc9 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/conf/TestConfigurationManager.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/conf/TestConfigurationManager.java @@ -21,17 +21,17 @@ package org.apache.hadoop.hbase.conf; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.testclassification.ClientTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({SmallTests.class, ClientTests.class}) public class TestConfigurationManager { - private static final Log LOG = LogFactory.getLog(TestConfigurationManager.class); + private static final Logger LOG = LoggerFactory.getLogger(TestConfigurationManager.class); class DummyConfigurationObserver implements ConfigurationObserver { private boolean notifiedOnChange = false; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/constraint/TestConstraint.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/constraint/TestConstraint.java index c2a16af7bae..35bcd77b15c 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/constraint/TestConstraint.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/constraint/TestConstraint.java @@ -24,8 +24,6 @@ import static org.junit.Assert.fail; import java.util.List; -import org.apache.commons.logging.Log; 
-import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HTableDescriptor; @@ -42,14 +40,16 @@ import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Do the complex testing of constraints against a minicluster */ @Category({MiscTests.class, MediumTests.class}) public class TestConstraint { - private static final Log LOG = LogFactory - .getLog(TestConstraint.class); + private static final Logger LOG = LoggerFactory + .getLogger(TestConstraint.class); private static HBaseTestingUtility util; private static final TableName tableName = TableName.valueOf("test"); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SampleRegionWALCoprocessor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SampleRegionWALCoprocessor.java index 9f58fc4a475..f3d90f6d61c 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SampleRegionWALCoprocessor.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SampleRegionWALCoprocessor.java @@ -24,8 +24,6 @@ import java.util.Arrays; import java.util.List; import java.util.Optional; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; @@ -34,6 +32,8 @@ import org.apache.hadoop.hbase.client.RegionInfo; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.wal.WALEdit; import org.apache.hadoop.hbase.wal.WALKey; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Class for testing WALObserver coprocessor. 
It will monitor WAL writing and restoring, and modify @@ -43,7 +43,7 @@ import org.apache.hadoop.hbase.wal.WALKey; public class SampleRegionWALCoprocessor implements WALCoprocessor, RegionCoprocessor, WALObserver, RegionObserver { - private static final Log LOG = LogFactory.getLog(SampleRegionWALCoprocessor.class); + private static final Logger LOG = LoggerFactory.getLogger(SampleRegionWALCoprocessor.class); private byte[] tableName; private byte[] row; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorInterface.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorInterface.java index 61b4808515a..44216ec72cc 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorInterface.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorInterface.java @@ -35,8 +35,6 @@ import java.util.Map; import java.util.Optional; import java.util.concurrent.ConcurrentMap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; @@ -73,11 +71,13 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; import org.mockito.Mockito; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({CoprocessorTests.class, SmallTests.class}) public class TestCoprocessorInterface { @Rule public TestName name = new TestName(); - private static final Log LOG = LogFactory.getLog(TestCoprocessorInterface.class); + private static final Logger LOG = LoggerFactory.getLogger(TestCoprocessorInterface.class); private static final HBaseTestingUtility TEST_UTIL = HBaseTestingUtility.createLocalHTU(); static final Path DIR = TEST_UTIL.getDataTestDir(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorMetrics.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorMetrics.java index b2c106255df..3789a2a3e62 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorMetrics.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorMetrics.java @@ -25,8 +25,7 @@ import java.util.Optional; import com.google.protobuf.RpcCallback; import com.google.protobuf.RpcController; import com.google.protobuf.ServiceException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; + import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CoprocessorEnvironment; @@ -71,6 +70,8 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; @@ -84,7 +85,7 @@ import static org.junit.Assert.assertTrue; @Category({CoprocessorTests.class, MediumTests.class}) public class TestCoprocessorMetrics { - private static final Log LOG = LogFactory.getLog(TestCoprocessorMetrics.class); + private static final Logger LOG = LoggerFactory.getLogger(TestCoprocessorMetrics.class); private static final HBaseTestingUtility UTIL = new HBaseTestingUtility(); private static final byte[] foo = Bytes.toBytes("foo"); diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorStop.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorStop.java index 1d8acdfe3ab..f96da7009ff 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorStop.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorStop.java @@ -20,8 +20,7 @@ package org.apache.hadoop.hbase.coprocessor; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; + import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.*; import org.apache.hadoop.fs.Path; @@ -33,6 +32,9 @@ import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import static org.junit.Assert.assertTrue; @@ -42,7 +44,7 @@ import static org.junit.Assert.assertTrue; */ @Category({CoprocessorTests.class, MediumTests.class}) public class TestCoprocessorStop { - private static final Log LOG = LogFactory.getLog(TestCoprocessorStop.class); + private static final Logger LOG = LoggerFactory.getLogger(TestCoprocessorStop.class); private static HBaseTestingUtility UTIL = new HBaseTestingUtility(); private static final String MASTER_FILE = "master" + System.currentTimeMillis(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterObserver.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterObserver.java index d24711a6fd3..0309eaa5121 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterObserver.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterObserver.java @@ -30,8 +30,6 @@ import java.util.Optional; import java.util.Set; import java.util.concurrent.CountDownLatch; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.CoprocessorEnvironment; import org.apache.hadoop.hbase.HBaseTestingUtility; @@ -70,7 +68,8 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsRequest; @@ -83,7 +82,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNa */ @Category({CoprocessorTests.class, MediumTests.class}) public class TestMasterObserver { - private static final Log LOG = LogFactory.getLog(TestMasterObserver.class); + private static final Logger LOG = LoggerFactory.getLogger(TestMasterObserver.class); public static CountDownLatch tableCreationLatch = new CountDownLatch(1); public static CountDownLatch tableDeletionLatch = new CountDownLatch(1); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestNegativeMemStoreSizeWithSlowCoprocessor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestNegativeMemStoreSizeWithSlowCoprocessor.java index 30b3d71a854..877265d29bd 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestNegativeMemStoreSizeWithSlowCoprocessor.java +++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestNegativeMemStoreSizeWithSlowCoprocessor.java @@ -12,8 +12,6 @@ package org.apache.hadoop.hbase.coprocessor; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; @@ -31,6 +29,8 @@ import org.junit.Assert; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Test that verifies we do not have memstore size negative when a postPut/Delete hook is @@ -39,8 +39,9 @@ import org.junit.experimental.categories.Category; */ @Category(LargeTests.class) public class TestNegativeMemStoreSizeWithSlowCoprocessor { + static final Logger LOG = + LoggerFactory.getLogger(TestNegativeMemStoreSizeWithSlowCoprocessor.class); - static final Log LOG = LogFactory.getLog(TestNegativeMemStoreSizeWithSlowCoprocessor.class); private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private static final byte[] tableName = Bytes.toBytes("test_table"); private static final byte[] family = Bytes.toBytes("f"); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverForAddingMutationsFromCoprocessors.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverForAddingMutationsFromCoprocessors.java index 3e1621cf558..07a1fa99f40 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverForAddingMutationsFromCoprocessors.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverForAddingMutationsFromCoprocessors.java @@ -23,8 +23,6 @@ import java.util.Arrays; import java.util.List; import java.util.Optional; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.HBaseConfiguration; @@ -53,6 +51,8 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; @@ -60,8 +60,8 @@ import static org.junit.Assert.assertNotNull; @Category(MediumTests.class) public class TestRegionObserverForAddingMutationsFromCoprocessors { - private static final Log LOG - = LogFactory.getLog(TestRegionObserverForAddingMutationsFromCoprocessors.class); + private static final Logger LOG + = LoggerFactory.getLogger(TestRegionObserverForAddingMutationsFromCoprocessors.class); private static HBaseTestingUtility util; private static final byte[] dummy = Bytes.toBytes("dummy"); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java index b55d8019d00..5f87d7ded11 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java @@ -30,8 +30,6 @@ import java.util.ArrayList; import java.util.List; import java.util.Optional; -import org.apache.commons.logging.Log; -import 
org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -90,12 +88,13 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; import org.junit.rules.TestRule; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; @Category({ CoprocessorTests.class, MediumTests.class }) public class TestRegionObserverInterface { - private static final Log LOG = LogFactory.getLog(TestRegionObserverInterface.class); + private static final Logger LOG = LoggerFactory.getLogger(TestRegionObserverInterface.class); @Rule public final TestRule timeout = CategoryBasedTimeout.builder(). withTimeout(this.getClass()).withLookingForStuckThread(true).build(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorExceptionWithAbort.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorExceptionWithAbort.java index aea09bbd140..ef0c3eb05ec 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorExceptionWithAbort.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorExceptionWithAbort.java @@ -23,8 +23,6 @@ import static org.junit.Assert.fail; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.CoprocessorEnvironment; import org.apache.hadoop.hbase.HBaseTestingUtility; @@ -43,6 +41,8 @@ import org.apache.hadoop.hbase.util.Bytes; import org.junit.Assert; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Tests unhandled exceptions thrown by coprocessors running on a regionserver.. 
@@ -52,7 +52,7 @@ import org.junit.experimental.categories.Category; */ @Category({CoprocessorTests.class, MediumTests.class}) public class TestRegionServerCoprocessorExceptionWithAbort { - private static final Log LOG = LogFactory.getLog( + private static final Logger LOG = LoggerFactory.getLogger( TestRegionServerCoprocessorExceptionWithAbort.class); private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private static final TableName TABLE_NAME = TableName.valueOf("observed_table"); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestWALObserver.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestWALObserver.java index 0dd2c8cd8db..86a0d391f1b 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestWALObserver.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestWALObserver.java @@ -32,8 +32,6 @@ import java.util.Map; import java.util.NavigableMap; import java.util.TreeMap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -74,6 +72,8 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Tests invocation of the @@ -82,7 +82,7 @@ import org.junit.rules.TestName; */ @Category({CoprocessorTests.class, MediumTests.class}) public class TestWALObserver { - private static final Log LOG = LogFactory.getLog(TestWALObserver.class); + private static final Logger LOG = LoggerFactory.getLogger(TestWALObserver.class); private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private static byte[] TEST_TABLE = Bytes.toBytes("observedTable"); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/errorhandling/TestForeignExceptionDispatcher.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/errorhandling/TestForeignExceptionDispatcher.java index 650e4d6212e..8ec1a44f093 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/errorhandling/TestForeignExceptionDispatcher.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/errorhandling/TestForeignExceptionDispatcher.java @@ -20,13 +20,13 @@ package org.apache.hadoop.hbase.errorhandling; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.testclassification.MasterTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.junit.Test; import org.junit.experimental.categories.Category; import org.mockito.Mockito; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Test that we propagate errors through an dispatcher exactly once via different failure @@ -34,7 +34,7 @@ import org.mockito.Mockito; */ @Category({MasterTests.class, SmallTests.class}) public class TestForeignExceptionDispatcher { - private static final Log LOG = LogFactory.getLog(TestForeignExceptionDispatcher.class); + private static final Logger LOG = LoggerFactory.getLogger(TestForeignExceptionDispatcher.class); /** * Exception thrown from the test diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/errorhandling/TestTimeoutExceptionInjector.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/errorhandling/TestTimeoutExceptionInjector.java index 37af804cb66..27bc6e1c7ae 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/errorhandling/TestTimeoutExceptionInjector.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/errorhandling/TestTimeoutExceptionInjector.java @@ -19,13 +19,13 @@ package org.apache.hadoop.hbase.errorhandling; import static org.junit.Assert.fail; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.testclassification.MasterTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.junit.Test; import org.junit.experimental.categories.Category; import org.mockito.Mockito; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Test the {@link TimeoutExceptionInjector} to ensure we fulfill contracts @@ -33,7 +33,7 @@ import org.mockito.Mockito; @Category({MasterTests.class, SmallTests.class}) public class TestTimeoutExceptionInjector { - private static final Log LOG = LogFactory.getLog(TestTimeoutExceptionInjector.class); + private static final Logger LOG = LoggerFactory.getLogger(TestTimeoutExceptionInjector.class); /** * Test that a manually triggered timer fires an exception. diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/executor/TestExecutorService.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/executor/TestExecutorService.java index d3de5393890..b0b17f97181 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/executor/TestExecutorService.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/executor/TestExecutorService.java @@ -25,8 +25,6 @@ import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.*; import org.apache.hadoop.hbase.Waiter.Predicate; @@ -36,12 +34,14 @@ import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.mockito.Mockito.*; @Category({MiscTests.class, SmallTests.class}) public class TestExecutorService { - private static final Log LOG = LogFactory.getLog(TestExecutorService.class); + private static final Logger LOG = LoggerFactory.getLogger(TestExecutorService.class); @Test public void testExecutorService() throws Exception { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/FilterTestingCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/FilterTestingCluster.java index 7041c92da8e..ac7928f931a 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/FilterTestingCluster.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/FilterTestingCluster.java @@ -27,7 +27,6 @@ import java.io.IOException; import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.impl.Log4JLogger; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseTestingUtility; @@ -38,14 +37,10 @@ import org.apache.hadoop.hbase.MasterNotRunningException; import org.apache.hadoop.hbase.TableName; import 
org.apache.hadoop.hbase.ZooKeeperConnectionException; import org.apache.hadoop.hbase.client.Admin; -import org.apache.hadoop.hbase.client.ScannerCallable; import org.apache.hadoop.hbase.client.Table; -import org.apache.hadoop.hbase.ipc.AbstractRpcClient; -import org.apache.hadoop.hbase.ipc.RpcServer; import org.apache.hadoop.hbase.testclassification.FilterTests; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.util.Bytes; -import org.apache.log4j.Level; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.experimental.categories.Category; @@ -112,9 +107,6 @@ public class FilterTestingCluster { @BeforeClass public static void setUp() throws Exception { - ((Log4JLogger)RpcServer.LOG).getLogger().setLevel(Level.ALL); - ((Log4JLogger)AbstractRpcClient.LOG).getLogger().setLevel(Level.ALL); - ((Log4JLogger)ScannerCallable.LOG).getLogger().setLevel(Level.ALL); TEST_UTIL.startMiniCluster(1); initialize(TEST_UTIL.getConfiguration()); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnRangeFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnRangeFilter.java index f03a4f0e9d8..7bcce946e06 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnRangeFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnRangeFilter.java @@ -26,8 +26,6 @@ import java.util.List; import java.util.Map; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.*; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; @@ -46,6 +44,8 @@ import org.junit.Before; import org.junit.BeforeClass; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; class StringRange { @@ -124,7 +124,7 @@ public class TestColumnRangeFilter { private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); - private static final Log LOG = LogFactory.getLog(TestColumnRangeFilter.class); + private static final Logger LOG = LoggerFactory.getLogger(TestColumnRangeFilter.class); @Rule public TestName name = new TestName(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestDependentColumnFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestDependentColumnFilter.java index d84fbe94817..ae90c63b717 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestDependentColumnFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestDependentColumnFilter.java @@ -28,8 +28,6 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CompareOperator; @@ -52,10 +50,12 @@ import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({FilterTests.class, SmallTests.class}) public class TestDependentColumnFilter { - private static final Log LOG = LogFactory.getLog(TestDependentColumnFilter.class); + private static final Logger LOG = LoggerFactory.getLogger(TestDependentColumnFilter.class); private static final byte[][] ROWS = { 
Bytes.toBytes("test1"),Bytes.toBytes("test2") }; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java index b4d1935b079..b28c23c8bcd 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java @@ -28,8 +28,6 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.CellUtil; @@ -63,13 +61,15 @@ import org.junit.experimental.categories.Category; import org.apache.hadoop.hbase.shaded.com.google.common.base.Throwables; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Test filters at the HRegion doorstep. */ @Category({FilterTests.class, SmallTests.class}) public class TestFilter { - private final static Log LOG = LogFactory.getLog(TestFilter.class); + private final static Logger LOG = LoggerFactory.getLogger(TestFilter.class); private HRegion region; private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java index 2a13ac8a663..59c6de29889 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java @@ -18,8 +18,6 @@ */ package org.apache.hadoop.hbase.filter; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.*; @@ -34,6 +32,8 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Tests filter Lists in ways that rely on a MiniCluster. 
Where possible, favor tests in @@ -42,7 +42,7 @@ import org.junit.rules.TestName; @Category({ MediumTests.class, FilterTests.class }) public class TestFilterListOnMini { - private static final Log LOG = LogFactory.getLog(TestFilterListOnMini.class); + private static final Logger LOG = LoggerFactory.getLogger(TestFilterListOnMini.class); private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); @Rule diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOrOperatorWithBlkCnt.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOrOperatorWithBlkCnt.java index 39abc95f060..1f1e919feb6 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOrOperatorWithBlkCnt.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOrOperatorWithBlkCnt.java @@ -23,8 +23,6 @@ import java.io.IOException; import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.TableName; @@ -44,6 +42,8 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /* * This test is for the optimization added in HBASE-15243. @@ -53,7 +53,8 @@ import org.junit.rules.TestName; public class TestFilterListOrOperatorWithBlkCnt { private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); - private static final Log LOG = LogFactory.getLog(TestFilterListOrOperatorWithBlkCnt.class); + private static final Logger LOG = + LoggerFactory.getLogger(TestFilterListOrOperatorWithBlkCnt.class); private byte[] family = Bytes.toBytes("family"); private byte[] qf = Bytes.toBytes("qf"); private byte[] value = Bytes.toBytes("val"); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWithScanLimits.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWithScanLimits.java index 79dc36f6889..29845c960d0 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWithScanLimits.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWithScanLimits.java @@ -27,8 +27,6 @@ import java.io.IOException; import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CompareOperator; import org.apache.hadoop.hbase.TableName; @@ -43,14 +41,16 @@ import org.apache.hadoop.hbase.util.Bytes; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Test if Filter is incompatible with scan-limits */ @Category({FilterTests.class, MediumTests.class}) public class TestFilterWithScanLimits extends FilterTestingCluster { - private static final Log LOG = LogFactory - .getLog(TestFilterWithScanLimits.class); + private static final Logger LOG = LoggerFactory + .getLogger(TestFilterWithScanLimits.class); private static final TableName tableName = TableName.valueOf("scanWithLimit"); private static final String columnFamily = "f1"; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWrapper.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWrapper.java index e779706ebbf..45567769617 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWrapper.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWrapper.java @@ -23,8 +23,6 @@ import java.io.IOException; import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; @@ -55,6 +53,8 @@ import org.junit.Test; import static org.junit.Assert.*; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Test if the FilterWrapper retains the same semantics defined in the @@ -62,7 +62,7 @@ import org.junit.experimental.categories.Category; */ @Category({FilterTests.class, MediumTests.class}) public class TestFilterWrapper { - private static final Log LOG = LogFactory.getLog(TestFilterWrapper.class); + private static final Logger LOG = LoggerFactory.getLogger(TestFilterWrapper.class); private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private static Configuration conf = null; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowAndColumnRangeFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowAndColumnRangeFilter.java index 36cf068395e..a8c8afc7ebf 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowAndColumnRangeFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowAndColumnRangeFilter.java @@ -25,8 +25,6 @@ import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HBaseTestingUtility; @@ -51,13 +49,15 @@ import org.junit.experimental.categories.Category; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** */ @Category({FilterTests.class, MediumTests.class}) public class TestFuzzyRowAndColumnRangeFilter { private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); - private static final Log LOG = LogFactory.getLog(TestFuzzyRowAndColumnRangeFilter.class); + private static final Logger LOG = LoggerFactory.getLogger(TestFuzzyRowAndColumnRangeFilter.class); @Rule public TestName name = new TestName(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowFilterEndToEnd.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowFilterEndToEnd.java index b043e073ac6..673857804b9 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowFilterEndToEnd.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowFilterEndToEnd.java @@ -17,8 +17,6 @@ */ package org.apache.hadoop.hbase.filter; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; @@ -48,6 +46,8 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import 
org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.io.IOException; import java.nio.ByteBuffer; @@ -63,7 +63,7 @@ import static org.junit.Assert.assertEquals; public class TestFuzzyRowFilterEndToEnd { private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private final static byte fuzzyValue = (byte) 63; - private static final Log LOG = LogFactory.getLog(TestFuzzyRowFilterEndToEnd.class); + private static final Logger LOG = LoggerFactory.getLogger(TestFuzzyRowFilterEndToEnd.class); private static int firstPartCardinality = 50; private static int secondPartCardinality = 50; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestMultiRowRangeFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestMultiRowRangeFilter.java index a5d04d21333..ff9db7572c6 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestMultiRowRangeFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestMultiRowRangeFilter.java @@ -24,8 +24,6 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; @@ -46,12 +44,14 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category(MediumTests.class) public class TestMultiRowRangeFilter { private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); - private static final Log LOG = LogFactory.getLog(TestMultiRowRangeFilter.class); + private static final Logger LOG = LoggerFactory.getLogger(TestMultiRowRangeFilter.class); private byte[] family = Bytes.toBytes("family"); private byte[] qf = Bytes.toBytes("qf"); private byte[] value = Bytes.toBytes("val"); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestScanRowPrefix.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestScanRowPrefix.java index db12989f5c4..495f63fd89e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestScanRowPrefix.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestScanRowPrefix.java @@ -19,8 +19,6 @@ */ package org.apache.hadoop.hbase.filter; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.commons.codec.binary.Hex; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Put; @@ -35,6 +33,8 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.io.IOException; import java.util.ArrayList; @@ -46,8 +46,8 @@ import java.util.List; */ @Category({FilterTests.class, MediumTests.class}) public class TestScanRowPrefix extends FilterTestingCluster { - private static final Log LOG = LogFactory - .getLog(TestScanRowPrefix.class); + private static final Logger LOG = LoggerFactory + .getLogger(TestScanRowPrefix.class); @Rule public TestName name = new TestName(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/fs/TestBlockReorder.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/fs/TestBlockReorder.java index a6011ad79e5..b24d30bdce3 100644 --- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/fs/TestBlockReorder.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/fs/TestBlockReorder.java @@ -28,9 +28,6 @@ import java.net.ServerSocket; import java.util.List; import java.util.concurrent.CountDownLatch; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.commons.logging.impl.Log4JLogger; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.BlockLocation; import org.apache.hadoop.fs.FSDataInputStream; @@ -64,7 +61,6 @@ import org.apache.hadoop.hdfs.protocol.LocatedBlock; import org.apache.hadoop.hdfs.protocol.LocatedBlocks; import org.apache.hadoop.hdfs.server.datanode.DataNode; import org.apache.hadoop.ipc.RemoteException; -import org.apache.log4j.Level; import org.junit.After; import org.junit.Assert; import org.junit.Before; @@ -72,18 +68,15 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Tests for the hdfs fix from HBASE-6435. */ @Category({MiscTests.class, LargeTests.class}) public class TestBlockReorder { - private static final Log LOG = LogFactory.getLog(TestBlockReorder.class); - - static { - ((Log4JLogger) DFSClient.LOG).getLogger().setLevel(Level.ALL); - ((Log4JLogger) HFileSystem.LOG).getLogger().setLevel(Level.ALL); - } + private static final Logger LOG = LoggerFactory.getLogger(TestBlockReorder.class); private Configuration conf; private MiniDFSCluster cluster; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHeapSize.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHeapSize.java index da45fdadb73..94df090049c 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHeapSize.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHeapSize.java @@ -40,8 +40,6 @@ import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicReference; import java.util.concurrent.locks.ReentrantReadWriteLock; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.Mutation; @@ -70,6 +68,8 @@ import org.apache.hadoop.hbase.util.ClassSize; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Testing the sizing that HeapSize offers and compares to the size given by @@ -77,7 +77,7 @@ import org.junit.experimental.categories.Category; */ @Category({IOTests.class, SmallTests.class}) public class TestHeapSize { - private static final Log LOG = LogFactory.getLog(TestHeapSize.class); + private static final Logger LOG = LoggerFactory.getLogger(TestHeapSize.class); // List of classes implementing HeapSize // BatchOperation, BatchUpdate, BlockIndex, Entry, Entry, HStoreKey // KeyValue, LruBlockCache, Put, WALKey diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/asyncfs/TestFanOutOneBlockAsyncDFSOutput.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/asyncfs/TestFanOutOneBlockAsyncDFSOutput.java index 6c190374657..75bd7ff7dc1 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/asyncfs/TestFanOutOneBlockAsyncDFSOutput.java +++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/asyncfs/TestFanOutOneBlockAsyncDFSOutput.java @@ -36,8 +36,6 @@ import java.util.concurrent.CompletableFuture; import java.util.concurrent.ExecutionException; import java.util.concurrent.ThreadLocalRandom; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -54,7 +52,8 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.io.netty.channel.Channel; import org.apache.hadoop.hbase.shaded.io.netty.channel.EventLoop; import org.apache.hadoop.hbase.shaded.io.netty.channel.EventLoopGroup; @@ -64,7 +63,7 @@ import org.apache.hadoop.hbase.shaded.io.netty.channel.socket.nio.NioSocketChann @Category({ MiscTests.class, MediumTests.class }) public class TestFanOutOneBlockAsyncDFSOutput { - private static final Log LOG = LogFactory.getLog(TestFanOutOneBlockAsyncDFSOutput.class); + private static final Logger LOG = LoggerFactory.getLogger(TestFanOutOneBlockAsyncDFSOutput.class); private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestChangingEncoding.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestChangingEncoding.java index 1901dcea7c1..b7b0998f9b0 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestChangingEncoding.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestChangingEncoding.java @@ -16,8 +16,6 @@ */ package org.apache.hadoop.hbase.io.encoding; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; @@ -43,6 +41,8 @@ import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.io.IOException; import java.util.ArrayList; @@ -58,7 +58,7 @@ import static org.junit.Assert.assertTrue; */ @Category({IOTests.class, LargeTests.class}) public class TestChangingEncoding { - private static final Log LOG = LogFactory.getLog(TestChangingEncoding.class); + private static final Logger LOG = LoggerFactory.getLogger(TestChangingEncoding.class); static final String CF = "EncodingTestCF"; static final byte[] CF_BYTES = Bytes.toBytes(CF); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestDataBlockEncoders.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestDataBlockEncoders.java index 27fd46d4bd1..f41db938205 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestDataBlockEncoders.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestDataBlockEncoders.java @@ -30,8 +30,6 @@ import java.util.Collection; import java.util.List; import java.util.Random; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.CategoryBasedTimeout; import org.apache.hadoop.hbase.Cell; @@ -59,6 +57,8 @@ import org.junit.rules.TestRule; import org.junit.runner.RunWith; import 
org.junit.runners.Parameterized; import org.junit.runners.Parameterized.Parameters; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** @@ -69,7 +69,7 @@ import org.junit.runners.Parameterized.Parameters; @RunWith(Parameterized.class) public class TestDataBlockEncoders { - private static final Log LOG = LogFactory.getLog(TestDataBlockEncoders.class); + private static final Logger LOG = LoggerFactory.getLogger(TestDataBlockEncoders.class); @Rule public final TestRule timeout = CategoryBasedTimeout.builder(). withTimeout(this.getClass()).withLookingForStuckThread(true).build(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestBlockCacheReporting.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestBlockCacheReporting.java index dab867327f8..380e6fbbd67 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestBlockCacheReporting.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestBlockCacheReporting.java @@ -22,11 +22,11 @@ import static org.junit.Assert.*; import java.io.IOException; import java.util.Map; import java.util.NavigableSet; +import java.util.Objects; import com.fasterxml.jackson.core.JsonGenerationException; import com.fasterxml.jackson.databind.JsonMappingException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; + import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HConstants; @@ -38,10 +38,12 @@ import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({IOTests.class, SmallTests.class}) public class TestBlockCacheReporting { - private static final Log LOG = LogFactory.getLog(TestBlockCacheReporting.class); + private static final Logger LOG = LoggerFactory.getLogger(TestBlockCacheReporting.class); private Configuration conf; @Before @@ -86,9 +88,9 @@ public class TestBlockCacheReporting { final int count = 3; addDataAndHits(cc.getBlockCache(), count); // The below has no asserts. It is just exercising toString and toJSON code. 
- LOG.info(cc.getBlockCache().getStats()); + LOG.info(Objects.toString(cc.getBlockCache().getStats())); BlockCacheUtil.CachedBlocksByFile cbsbf = logPerBlock(cc.getBlockCache()); - LOG.info(cbsbf); + LOG.info(Objects.toString(cbsbf)); logPerFile(cbsbf); bucketCacheReport(cc.getBlockCache()); LOG.info(BlockCacheUtil.toJSON(cbsbf)); @@ -106,9 +108,9 @@ public class TestBlockCacheReporting { BlockCache bc = cc.getBlockCache(); LOG.info("count=" + bc.getBlockCount() + ", currentSize=" + bc.getCurrentSize() + ", freeSize=" + bc.getFreeSize() ); - LOG.info(cc.getBlockCache().getStats()); + LOG.info(Objects.toString(cc.getBlockCache().getStats())); BlockCacheUtil.CachedBlocksByFile cbsbf = logPerBlock(cc.getBlockCache()); - LOG.info(cbsbf); + LOG.info(Objects.toString(cbsbf)); logPerFile(cbsbf); bucketCacheReport(cc.getBlockCache()); LOG.info(BlockCacheUtil.toJSON(cbsbf)); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.java index e1ae654b5bb..e07d2aa531d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.java @@ -28,8 +28,6 @@ import java.lang.management.ManagementFactory; import java.lang.management.MemoryUsage; import java.nio.ByteBuffer; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -49,6 +47,8 @@ import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Tests that {@link CacheConfig} does as expected. @@ -59,7 +59,7 @@ import org.junit.experimental.categories.Category; // tests clash on the global variable if this test is run as small sized test. 
@Category({IOTests.class, LargeTests.class}) public class TestCacheConfig { - private static final Log LOG = LogFactory.getLog(TestCacheConfig.class); + private static final Logger LOG = LoggerFactory.getLogger(TestCacheConfig.class); private Configuration conf; static class Deserializer implements CacheableDeserializer { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java index 9535a461c45..74a310d9808 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java @@ -34,8 +34,6 @@ import java.util.List; import java.util.Map; import java.util.Random; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -71,7 +69,8 @@ import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import org.junit.runners.Parameterized.Parameters; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; /** @@ -82,7 +81,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; @Category({IOTests.class, MediumTests.class}) public class TestCacheOnWrite { - private static final Log LOG = LogFactory.getLog(TestCacheOnWrite.class); + private static final Logger LOG = LoggerFactory.getLogger(TestCacheOnWrite.class); private static final HBaseTestingUtility TEST_UTIL = HBaseTestingUtility.createLocalHTU(); private Configuration conf; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestChecksum.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestChecksum.java index 5111e365ea4..d48c5f3b0d6 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestChecksum.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestChecksum.java @@ -34,8 +34,6 @@ import java.util.Arrays; import java.util.Iterator; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; @@ -52,10 +50,12 @@ import org.apache.hadoop.hbase.util.ChecksumType; import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({IOTests.class, SmallTests.class}) public class TestChecksum { - private static final Log LOG = LogFactory.getLog(TestHFileBlock.class); + private static final Logger LOG = LoggerFactory.getLogger(TestHFileBlock.class); static final Compression.Algorithm[] COMPRESSION_ALGORITHMS = { NONE, GZ }; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestFixedFileTrailer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestFixedFileTrailer.java index fcc09d550a9..8b2a3af4050 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestFixedFileTrailer.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestFixedFileTrailer.java @@ -31,8 +31,6 @@ import java.util.ArrayList; import java.util.Collection; import java.util.List; -import 
org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; @@ -47,12 +45,14 @@ import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import org.junit.runners.Parameterized.Parameters; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @RunWith(Parameterized.class) @Category({IOTests.class, SmallTests.class}) public class TestFixedFileTrailer { - private static final Log LOG = LogFactory.getLog(TestFixedFileTrailer.class); + private static final Logger LOG = LoggerFactory.getLogger(TestFixedFileTrailer.class); private static final int MAX_COMPARATOR_NAME_LENGTH = 128; /** diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java index cc4a71683e2..30501e2e08b 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java @@ -29,10 +29,9 @@ import java.io.DataOutput; import java.io.IOException; import java.nio.ByteBuffer; import java.util.Arrays; +import java.util.Objects; import java.util.Random; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; @@ -65,6 +64,8 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * test hfile features. 
@@ -74,7 +75,7 @@ public class TestHFile { @Rule public TestName testName = new TestName(); - private static final Log LOG = LogFactory.getLog(TestHFile.class); + private static final Logger LOG = LoggerFactory.getLogger(TestHFile.class); private static final int NUM_VALID_KEY_TYPES = KeyValue.Type.values().length - 2; private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private static String ROOT_DIR = @@ -322,7 +323,7 @@ public class TestHFile { .withFileContext(meta) .withComparator(CellComparatorImpl.COMPARATOR) .create(); - LOG.info(writer); + LOG.info(Objects.toString(writer)); writeRecords(writer, useTags); fout.close(); FSDataInputStream fin = fs.open(ncHFile); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java index 8a2d721b5a0..9c36788940d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java @@ -40,8 +40,6 @@ import java.util.concurrent.ExecutorCompletionService; import java.util.concurrent.Executors; import java.util.concurrent.Future; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; @@ -75,6 +73,8 @@ import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import org.junit.runners.Parameterized.Parameters; import org.mockito.Mockito; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({IOTests.class, MediumTests.class}) @RunWith(Parameterized.class) @@ -83,7 +83,7 @@ public class TestHFileBlock { private static final boolean detailedLogging = false; private static final boolean[] BOOLEAN_VALUES = new boolean[] { false, true }; - private static final Log LOG = LogFactory.getLog(TestHFileBlock.class); + private static final Logger LOG = LoggerFactory.getLogger(TestHFileBlock.class); static final Compression.Algorithm[] COMPRESSION_ALGORITHMS = { NONE, GZ }; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java index c5bc9d74763..ad42a66ed78 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java @@ -35,8 +35,6 @@ import java.util.List; import java.util.Random; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; @@ -68,6 +66,8 @@ import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import org.junit.runners.Parameterized.Parameters; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @RunWith(Parameterized.class) @Category({IOTests.class, MediumTests.class}) @@ -82,7 +82,7 @@ public class TestHFileBlockIndex { this.compr = compr; } - private static final Log LOG = LogFactory.getLog(TestHFileBlockIndex.class); + private static final Logger LOG = LoggerFactory.getLogger(TestHFileBlockIndex.class); private static final int NUM_DATA_BLOCKS = 1000; private static final 
HBaseTestingUtility TEST_UTIL = diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileEncryption.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileEncryption.java index 2dd00731a4f..bec774ee0a9 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileEncryption.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileEncryption.java @@ -30,8 +30,6 @@ import java.security.SecureRandom; import java.util.List; import java.util.UUID; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; @@ -54,10 +52,12 @@ import org.apache.hadoop.hbase.util.RedundantKVGenerator; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({IOTests.class, SmallTests.class}) public class TestHFileEncryption { - private static final Log LOG = LogFactory.getLog(TestHFileEncryption.class); + private static final Logger LOG = LoggerFactory.getLogger(TestHFileEncryption.class); private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private static final SecureRandom RNG = new SecureRandom(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileSeek.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileSeek.java index e4a3908e48a..710fe43fe83 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileSeek.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileSeek.java @@ -32,8 +32,6 @@ import org.apache.commons.cli.Option; import org.apache.commons.cli.OptionBuilder; import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; @@ -50,6 +48,8 @@ import org.apache.hadoop.hbase.testclassification.IOTests; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.io.BytesWritable; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * test the performance for seek. 
@@ -73,7 +73,7 @@ public class TestHFileSeek extends TestCase { private RandomDistribution.DiscreteRNG keyLenGen; private KVGenerator kvGen; - private static final Log LOG = LogFactory.getLog(TestHFileSeek.class); + private static final Logger LOG = LoggerFactory.getLogger(TestHFileSeek.class); @Override public void setUp() throws IOException { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV3.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV3.java index 5f320c80db5..5dade74a7b5 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV3.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV3.java @@ -31,8 +31,6 @@ import java.util.Collection; import java.util.List; import java.util.Random; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FileSystem; @@ -62,6 +60,8 @@ import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import org.junit.runners.Parameterized.Parameters; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Testing writing a version 3 {@link HFile}. @@ -70,7 +70,7 @@ import org.junit.runners.Parameterized.Parameters; @Category({IOTests.class, SmallTests.class}) public class TestHFileWriterV3 { - private static final Log LOG = LogFactory.getLog(TestHFileWriterV3.class); + private static final Logger LOG = LoggerFactory.getLogger(TestHFileWriterV3.class); private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLazyDataBlockDecompression.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLazyDataBlockDecompression.java index d92453a03a5..42cc6e53b74 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLazyDataBlockDecompression.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLazyDataBlockDecompression.java @@ -17,9 +17,17 @@ */ package org.apache.hadoop.hbase.io.hfile; -import org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterables; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.Random; + import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -38,15 +46,10 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.Map; -import java.util.Random; - -import static org.junit.Assert.*; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterables; /** * A kind of integration test at the intersection of {@link HFileBlock}, {@link CacheConfig}, @@ -55,7 +58,7 @@ import static org.junit.Assert.*; @Category({IOTests.class, SmallTests.class}) 
@RunWith(Parameterized.class) public class TestLazyDataBlockDecompression { - private static final Log LOG = LogFactory.getLog(TestLazyDataBlockDecompression.class); + private static final Logger LOG = LoggerFactory.getLogger(TestLazyDataBlockDecompression.class); private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private FileSystem fs; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerFromBucketCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerFromBucketCache.java index 7e2a0a52b82..106a8795446 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerFromBucketCache.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerFromBucketCache.java @@ -26,8 +26,6 @@ import java.io.IOException; import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.ByteBufferKeyValue; @@ -56,10 +54,12 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({ RegionServerTests.class, MediumTests.class }) public class TestScannerFromBucketCache { - private static final Log LOG = LogFactory.getLog(TestScannerFromBucketCache.class); + private static final Logger LOG = LoggerFactory.getLogger(TestScannerFromBucketCache.class); @Rule public TestName name = new TestName(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java index 459deeb5e4f..5e88b14f033 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java @@ -24,8 +24,6 @@ import java.util.Collection; import java.util.List; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.HBaseTestingUtility; @@ -48,6 +46,8 @@ import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import org.junit.runners.Parameterized.Parameters; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Test the optimization that does not scan files where all timestamps are @@ -57,8 +57,8 @@ import org.junit.runners.Parameterized.Parameters; @Category({IOTests.class, MediumTests.class}) public class TestScannerSelectionUsingTTL { - private static final Log LOG = - LogFactory.getLog(TestScannerSelectionUsingTTL.class); + private static final Logger LOG = + LoggerFactory.getLogger(TestScannerSelectionUsingTTL.class); private static final HBaseTestingUtility TEST_UTIL = HBaseTestingUtility.createLocalHTU(); private static TableName TABLE = TableName.valueOf("myTable"); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekBeforeWithInlineBlocks.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekBeforeWithInlineBlocks.java index f4309eaf9ad..651cf020372 100644 --- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekBeforeWithInlineBlocks.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekBeforeWithInlineBlocks.java @@ -25,8 +25,6 @@ import static org.junit.Assert.assertTrue; import java.io.IOException; import java.util.Random; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -44,11 +42,13 @@ import org.apache.hadoop.hbase.util.BloomFilterFactory; import org.apache.hadoop.hbase.util.Bytes; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({IOTests.class, MediumTests.class}) public class TestSeekBeforeWithInlineBlocks { - private static final Log LOG = LogFactory.getLog(TestSeekBeforeWithInlineBlocks.class); + private static final Logger LOG = LoggerFactory.getLogger(TestSeekBeforeWithInlineBlocks.class); private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/AbstractTestIPC.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/AbstractTestIPC.java index 90fb2f3151c..6cb63a6c381 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/AbstractTestIPC.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/AbstractTestIPC.java @@ -36,8 +36,6 @@ import java.net.InetSocketAddress; import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellScanner; @@ -61,7 +59,8 @@ import org.apache.hadoop.io.compress.GzipCodec; import org.apache.hadoop.util.StringUtils; import org.junit.BeforeClass; import org.junit.Test; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableList; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; @@ -70,7 +69,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; */ public abstract class AbstractTestIPC { - private static final Log LOG = LogFactory.getLog(AbstractTestIPC.class); + private static final Logger LOG = LoggerFactory.getLogger(AbstractTestIPC.class); private static final byte[] CELL_BYTES = Bytes.toBytes("xyz"); private static final KeyValue CELL = new KeyValue(CELL_BYTES, CELL_BYTES, CELL_BYTES, CELL_BYTES); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestFifoRpcScheduler.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestFifoRpcScheduler.java index 8611e48caf5..119a28c9c3f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestFifoRpcScheduler.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestFifoRpcScheduler.java @@ -17,8 +17,6 @@ */ package org.apache.hadoop.hbase.ipc; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.CategoryBasedTimeout; import org.apache.hadoop.hbase.HBaseConfiguration; @@ -32,6 +30,8 @@ import org.junit.experimental.categories.Category; import org.junit.rules.TestRule; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; +import org.slf4j.Logger; +import 
org.slf4j.LoggerFactory; import java.io.IOException; import java.lang.reflect.Field; @@ -52,7 +52,7 @@ public class TestFifoRpcScheduler { CategoryBasedTimeout.builder().withTimeout(this.getClass()). withLookingForStuckThread(true).build(); - private static final Log LOG = LogFactory.getLog(TestFifoRpcScheduler.class); + private static final Logger LOG = LoggerFactory.getLogger(TestFifoRpcScheduler.class); private AtomicInteger callExecutionCount; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcClientLeaks.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcClientLeaks.java index 20687ab424e..06154cd5d68 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcClientLeaks.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcClientLeaks.java @@ -27,8 +27,6 @@ import java.net.Socket; import java.net.SocketAddress; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.CategoryBasedTimeout; import org.apache.hadoop.hbase.HBaseTestingUtility; @@ -49,6 +47,8 @@ import org.junit.experimental.categories.Category; import org.junit.rules.ExpectedException; import org.junit.rules.TestName; import org.junit.rules.TestRule; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category(MediumTests.class) public class TestRpcClientLeaks { @@ -99,7 +99,7 @@ public class TestRpcClientLeaks { UTIL.shutdownMiniCluster(); } - public static final Log LOG = LogFactory.getLog(TestRpcClientLeaks.class); + public static final Logger LOG = LoggerFactory.getLogger(TestRpcClientLeaks.class); @Test(expected=RetriesExhaustedException.class) public void testSocketClosed() throws IOException, InterruptedException { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestSimpleRpcScheduler.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestSimpleRpcScheduler.java index 8521e653b20..9e59bb9c8e4 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestSimpleRpcScheduler.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestSimpleRpcScheduler.java @@ -46,8 +46,6 @@ import java.util.concurrent.BlockingQueue; import java.util.concurrent.CountDownLatch; import java.util.concurrent.LinkedBlockingQueue; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.CategoryBasedTimeout; import org.apache.hadoop.hbase.HBaseConfiguration; @@ -71,6 +69,8 @@ import org.junit.experimental.categories.Category; import org.junit.rules.TestRule; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({RPCTests.class, SmallTests.class}) public class TestSimpleRpcScheduler { @@ -79,7 +79,7 @@ public class TestSimpleRpcScheduler { CategoryBasedTimeout.builder().withTimeout(this.getClass()). 
withLookingForStuckThread(true).build(); - private static final Log LOG = LogFactory.getLog(TestSimpleRpcScheduler.class); + private static final Logger LOG = LoggerFactory.getLogger(TestSimpleRpcScheduler.class); private final RpcScheduler.Context CONTEXT = new RpcScheduler.Context() { @Override diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/AbstractTestDLS.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/AbstractTestDLS.java index 36624e8fbfd..f3d53c152ad 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/AbstractTestDLS.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/AbstractTestDLS.java @@ -43,8 +43,6 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import java.util.concurrent.atomic.LongAdder; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileStatus; @@ -92,14 +90,15 @@ import org.junit.BeforeClass; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TestName; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; /** * Base class for testing distributed log splitting. */ public abstract class AbstractTestDLS { - private static final Log LOG = LogFactory.getLog(TestSplitLogManager.class); + private static final Logger LOG = LoggerFactory.getLogger(TestSplitLogManager.class); private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestActiveMasterManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestActiveMasterManager.java index 9b6cd165657..d813dfb5114 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestActiveMasterManager.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestActiveMasterManager.java @@ -25,8 +25,6 @@ import static org.junit.Assert.assertTrue; import java.io.IOException; import java.util.concurrent.Semaphore; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.hbase.ChoreService; @@ -51,13 +49,15 @@ import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; import org.mockito.Mockito; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Test the {@link ActiveMasterManager}. 
*/ @Category({MasterTests.class, MediumTests.class}) public class TestActiveMasterManager { - private final static Log LOG = LogFactory.getLog(TestActiveMasterManager.class); + private final static Logger LOG = LoggerFactory.getLogger(TestActiveMasterManager.class); private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); @BeforeClass @@ -226,7 +226,7 @@ public class TestActiveMasterManager { } public static class NodeDeletionListener extends ZKListener { - private static final Log LOG = LogFactory.getLog(NodeDeletionListener.class); + private static final Logger LOG = LoggerFactory.getLogger(NodeDeletionListener.class); private Semaphore lock; private String node; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentListener.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentListener.java index 7f8e9c9ee12..34a1ccf6aa7 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentListener.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentListener.java @@ -26,8 +26,6 @@ import java.util.HashMap; import java.util.List; import java.util.concurrent.atomic.AtomicInteger; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Abortable; import org.apache.hadoop.hbase.HBaseTestingUtility; @@ -58,10 +56,12 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; import org.mockito.Mockito; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({MasterTests.class, MediumTests.class}) public class TestAssignmentListener { - private static final Log LOG = LogFactory.getLog(TestAssignmentListener.class); + private static final Logger LOG = LoggerFactory.getLogger(TestAssignmentListener.class); private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentManagerMetrics.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentManagerMetrics.java index fbbebcc4b00..717933aa761 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentManagerMetrics.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentManagerMetrics.java @@ -19,8 +19,6 @@ package org.apache.hadoop.hbase.master; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.CompatibilityFactory; import org.apache.hadoop.hbase.HBaseTestingUtility; @@ -40,13 +38,15 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.junit.Assert.fail; @Category(MediumTests.class) public class TestAssignmentManagerMetrics { - private static final Log LOG = LogFactory.getLog(TestAssignmentManagerMetrics.class); + private static final Logger LOG = LoggerFactory.getLogger(TestAssignmentManagerMetrics.class); private static final MetricsAssertHelper metricsHelper = CompatibilityFactory .getInstance(MetricsAssertHelper.class); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java index fcdf4d6ba5c..da501006d1c 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java @@ -28,13 +28,12 @@ import static org.mockito.Mockito.spy; import java.io.IOException; import java.util.Map; import java.util.NavigableMap; +import java.util.Objects; import java.util.SortedMap; import java.util.SortedSet; import java.util.TreeMap; import java.util.concurrent.ConcurrentSkipListMap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -62,7 +61,6 @@ import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.FSUtils; import org.apache.hadoop.hbase.util.HFileArchiveUtil; -import org.apache.hadoop.hbase.util.Threads; import org.apache.hadoop.hbase.util.Triple; import org.junit.After; import org.junit.Before; @@ -72,10 +70,12 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; import org.junit.rules.TestRule; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({MasterTests.class, SmallTests.class}) public class TestCatalogJanitor { - private static final Log LOG = LogFactory.getLog(TestCatalogJanitor.class); + private static final Logger LOG = LoggerFactory.getLogger(TestCatalogJanitor.class); @Rule public final TestRule timeout = CategoryBasedTimeout.builder(). withTimeout(this.getClass()).withLookingForStuckThread(true).build(); @Rule public final TestName name = new TestName(); @@ -478,7 +478,7 @@ public class TestCatalogJanitor { private void logFiles(String description, FileStatus[] storeFiles) { LOG.debug("Current " + description + ": "); for (FileStatus file : storeFiles) { - LOG.debug(file.getPath()); + LOG.debug(Objects.toString(file.getPath())); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitorInMemoryStates.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitorInMemoryStates.java index 28ed6a8c2b1..c42d042a43f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitorInMemoryStates.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitorInMemoryStates.java @@ -26,8 +26,6 @@ import java.io.IOException; import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.CategoryBasedTimeout; import org.apache.hadoop.hbase.HBaseTestingUtility; @@ -56,10 +54,12 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; import org.junit.rules.TestRule; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({MasterTests.class, MediumTests.class}) public class TestCatalogJanitorInMemoryStates { - private static final Log LOG = LogFactory.getLog(TestCatalogJanitorInMemoryStates.class); + private static final Logger LOG = LoggerFactory.getLogger(TestCatalogJanitorInMemoryStates.class); @Rule public final TestRule timeout = CategoryBasedTimeout.builder(). 
withTimeout(this.getClass()).withLookingForStuckThread(true).build(); @Rule public final TestName name = new TestName(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestClockSkewDetection.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestClockSkewDetection.java index bd7c5073f96..77f038af066 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestClockSkewDetection.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestClockSkewDetection.java @@ -24,8 +24,6 @@ import static org.mockito.Mockito.when; import java.net.InetAddress; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.ClockOutOfSyncException; import org.apache.hadoop.hbase.HBaseConfiguration; @@ -36,11 +34,13 @@ import org.apache.hadoop.hbase.testclassification.MasterTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({MasterTests.class, SmallTests.class}) public class TestClockSkewDetection { - private static final Log LOG = - LogFactory.getLog(TestClockSkewDetection.class); + private static final Logger LOG = + LoggerFactory.getLogger(TestClockSkewDetection.class); @Test public void testClockSkewDetection() throws Exception { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestHMasterRPCException.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestHMasterRPCException.java index 4b2c91150bb..fe7bc10f085 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestHMasterRPCException.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestHMasterRPCException.java @@ -23,8 +23,6 @@ import static org.junit.Assert.assertTrue; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; @@ -45,13 +43,15 @@ import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel; import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; @Category({ MasterTests.class, MediumTests.class }) public class TestHMasterRPCException { - private static final Log LOG = LogFactory.getLog(TestHMasterRPCException.class); + private static final Logger LOG = LoggerFactory.getLogger(TestHMasterRPCException.class); private final HBaseTestingUtility testUtil = HBaseTestingUtility.createLocalHTU(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMaster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMaster.java index eed793f0990..07a21beb40a 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMaster.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMaster.java @@ -26,8 +26,6 @@ import static org.junit.Assert.fail; import java.io.IOException; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HColumnDescriptor; 
import org.apache.hadoop.hbase.HConstants; @@ -55,13 +53,14 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.base.Joiner; @Category({MasterTests.class, MediumTests.class}) public class TestMaster { private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); - private static final Log LOG = LogFactory.getLog(TestMaster.class); + private static final Logger LOG = LoggerFactory.getLogger(TestMaster.class); private static final TableName TABLENAME = TableName.valueOf("TestMaster"); private static final byte[] FAMILYNAME = Bytes.toBytes("fam"); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterFailover.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterFailover.java index 18378ace520..ff813eba77a 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterFailover.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterFailover.java @@ -24,8 +24,6 @@ import static org.junit.Assert.assertTrue; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.CategoryBasedTimeout; import org.apache.hadoop.hbase.ClusterStatus; import org.apache.hadoop.hbase.HBaseTestingUtility; @@ -42,10 +40,12 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; import org.junit.rules.TestRule; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({FlakeyTests.class, LargeTests.class}) public class TestMasterFailover { - private static final Log LOG = LogFactory.getLog(TestMasterFailover.class); + private static final Logger LOG = LoggerFactory.getLogger(TestMasterFailover.class); @Rule public TestName name = new TestName(); @Rule public final TestRule timeout = CategoryBasedTimeout.builder(). withTimeout(this.getClass()). 
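The hunks above and below all apply the same mechanical swap, so it is worth spelling out once. The following is a minimal sketch and is not part of the patch: the class name MyTest, the report method, and the stats argument are placeholders.

    import java.util.Objects;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class MyTest {
      // Before: private static final Log LOG = LogFactory.getLog(MyTest.class);
      private static final Logger LOG = LoggerFactory.getLogger(MyTest.class);

      void report(Object stats) {
        // commons-logging's Log.info(Object) accepted any Object; org.slf4j.Logger.info
        // takes a String, hence the Objects.toString(...) wrapping seen in the hunks above.
        LOG.info(Objects.toString(stats));
        // slf4j's parameterized form avoids the explicit conversion altogether.
        LOG.info("stats={}", stats);
      }
    }

The patch keeps the existing string concatenation in log statements and only converts where the compiler forces it; moving to {} placeholders would be a separate cleanup.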
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterFileSystem.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterFileSystem.java index bf13e7fc5dc..4d2a885fa23 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterFileSystem.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterFileSystem.java @@ -21,8 +21,6 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HBaseTestingUtility; @@ -34,13 +32,15 @@ import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Test the master filesystem in a local cluster */ @Category({MasterTests.class, MediumTests.class}) public class TestMasterFileSystem { - private static final Log LOG = LogFactory.getLog(TestMasterFileSystem.class); + private static final Logger LOG = LoggerFactory.getLogger(TestMasterFileSystem.class); private static final HBaseTestingUtility UTIL = new HBaseTestingUtility(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterMetrics.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterMetrics.java index b300818047a..9b848235146 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterMetrics.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterMetrics.java @@ -19,8 +19,6 @@ package org.apache.hadoop.hbase.master; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.CompatibilityFactory; import org.apache.hadoop.hbase.CoordinatedStateManager; @@ -38,11 +36,13 @@ import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({MasterTests.class, MediumTests.class}) public class TestMasterMetrics { - private static final Log LOG = LogFactory.getLog(TestMasterMetrics.class); + private static final Logger LOG = LoggerFactory.getLogger(TestMasterMetrics.class); private static final MetricsAssertHelper metricsHelper = CompatibilityFactory .getInstance(MetricsAssertHelper.class); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterMetricsWrapper.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterMetricsWrapper.java index 9acd2f7500d..1b5175a0cee 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterMetricsWrapper.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterMetricsWrapper.java @@ -21,8 +21,6 @@ import static org.junit.Assert.*; import java.util.AbstractMap.SimpleImmutableEntry; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.quotas.SpaceQuotaSnapshot; import org.apache.hadoop.hbase.quotas.SpaceQuotaSnapshot.SpaceQuotaStatus; @@ -34,10 +32,12 @@ import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; import 
org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({MasterTests.class, MediumTests.class}) public class TestMasterMetricsWrapper { - private static final Log LOG = LogFactory.getLog(TestMasterMetricsWrapper.class); + private static final Logger LOG = LoggerFactory.getLogger(TestMasterMetricsWrapper.class); private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private static final int NUM_RS = 4; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterNoCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterNoCluster.java index 20c9fe1f85e..7a2817e0887 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterNoCluster.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterNoCluster.java @@ -27,8 +27,6 @@ import java.net.UnknownHostException; import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Abortable; import org.apache.hadoop.hbase.CategoryBasedTimeout; @@ -66,6 +64,8 @@ import org.junit.experimental.categories.Category; import org.junit.rules.TestName; import org.junit.rules.TestRule; import org.mockito.Mockito; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Standup the master and fake it to test various aspects of master function. @@ -77,7 +77,7 @@ import org.mockito.Mockito; */ @Category({MasterTests.class, MediumTests.class}) public class TestMasterNoCluster { - private static final Log LOG = LogFactory.getLog(TestMasterNoCluster.class); + private static final Logger LOG = LoggerFactory.getLogger(TestMasterNoCluster.class); private static final HBaseTestingUtility TESTUTIL = new HBaseTestingUtility(); @Rule public final TestRule timeout = CategoryBasedTimeout.builder(). 
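Exception logging changes shape under slf4j as well; the TestSplitLogManager hunk further below rewrites LOG.warn(e) because org.slf4j.Logger has no warn(Object) overload. A minimal sketch follows, with a placeholder class and message text not taken from the patch:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class ExceptionLoggingSketch {
      private static final Logger LOG = LoggerFactory.getLogger(ExceptionLoggingSketch.class);

      void handle(Exception e) {
        // Direct translation of the old LOG.warn(e): message text plus stack trace.
        LOG.warn(e.toString(), e);
        // The idiomatic slf4j form passes a real message and the Throwable last.
        LOG.warn("failed to set data on znode", e);
      }
    }

The same hunk drops the static block that called org.apache.log4j.Logger#setLevel(Level.DEBUG); slf4j exposes no API for setting levels, so the equivalent belongs in the backend configuration, for example a log4j.properties line such as log4j.logger.org.apache.hadoop.hbase=DEBUG under a log4j 1.x binding.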
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java index dd0fada0f14..43c258ed8e5 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java @@ -32,8 +32,6 @@ import java.util.Map; import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.ClusterStatus.Option; import org.apache.hadoop.hbase.HBaseTestingUtility; @@ -66,10 +64,12 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({MasterTests.class, MediumTests.class}) public class TestMasterOperationsForRegionReplicas { - private static final Log LOG = LogFactory.getLog(TestRegionPlacement.class); + private static final Logger LOG = LoggerFactory.getLogger(TestRegionPlacement.class); private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private static Connection CONNECTION = null; private static Admin ADMIN; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterRestartAfterDisablingTable.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterRestartAfterDisablingTable.java index fc49c4441fb..2af6255c0e9 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterRestartAfterDisablingTable.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterRestartAfterDisablingTable.java @@ -24,8 +24,6 @@ import static org.junit.Assert.assertTrue; import java.util.List; import java.util.NavigableSet; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseTestingUtility; @@ -43,11 +41,14 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({MasterTests.class, LargeTests.class}) public class TestMasterRestartAfterDisablingTable { - private static final Log LOG = LogFactory.getLog(TestMasterRestartAfterDisablingTable.class); + private static final Logger LOG = + LoggerFactory.getLogger(TestMasterRestartAfterDisablingTable.class); @Rule public TestName name = new TestName(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterShutdown.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterShutdown.java index ebf16b7007d..0eff8df289d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterShutdown.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterShutdown.java @@ -25,8 +25,6 @@ import static org.junit.Assert.assertTrue; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.ClusterStatus; import org.apache.hadoop.hbase.HBaseConfiguration; @@ -41,10 +39,12 @@ 
import org.apache.hadoop.hbase.testclassification.MasterTests; import org.apache.hadoop.hbase.util.JVMClusterUtil.MasterThread; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({MasterTests.class, LargeTests.class}) public class TestMasterShutdown { - private static final Log LOG = LogFactory.getLog(TestMasterShutdown.class); + private static final Logger LOG = LoggerFactory.getLogger(TestMasterShutdown.class); /** * Simple test of shutdown. diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterTransitions.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterTransitions.java index 042a4621290..65351bdc7c2 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterTransitions.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterTransitions.java @@ -20,8 +20,6 @@ package org.apache.hadoop.hbase.master; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.MetaTableAccessor; @@ -44,6 +42,8 @@ import org.junit.BeforeClass; import org.junit.Ignore; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Test transitions of state across the master. Sets up the cluster once and @@ -51,7 +51,7 @@ import org.junit.experimental.categories.Category; */ @Category({MasterTests.class, LargeTests.class}) public class TestMasterTransitions { - private static final Log LOG = LogFactory.getLog(TestMasterTransitions.class); + private static final Logger LOG = LoggerFactory.getLogger(TestMasterTransitions.class); private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private static final TableName TABLENAME = TableName.valueOf("master_transitions"); private static final byte [][] FAMILIES = new byte [][] {Bytes.toBytes("a"), diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionPlacement.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionPlacement.java index 0c9e33ebc4f..e99d533e18e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionPlacement.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionPlacement.java @@ -34,8 +34,6 @@ import java.util.Map; import java.util.Random; import java.util.concurrent.atomic.AtomicInteger; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HColumnDescriptor; @@ -70,10 +68,12 @@ import org.junit.BeforeClass; import org.junit.Ignore; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({MasterTests.class, MediumTests.class}) public class TestRegionPlacement { - private static final Log LOG = LogFactory.getLog(TestRegionPlacement.class); + private static final Logger LOG = LoggerFactory.getLogger(TestRegionPlacement.class); private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private final static int SLAVES = 10; private static Connection CONNECTION; diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionPlacement2.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionPlacement2.java index 87f5ba3ab58..835e274e8d2 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionPlacement2.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionPlacement2.java @@ -25,8 +25,6 @@ import java.util.List; import java.util.Map; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseIOException; import org.apache.hadoop.hbase.HBaseTestingUtility; @@ -47,10 +45,12 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({MasterTests.class, MediumTests.class}) public class TestRegionPlacement2 { - private static final Log LOG = LogFactory.getLog(TestRegionPlacement2.class); + private static final Logger LOG = LoggerFactory.getLogger(TestRegionPlacement2.class); private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private final static int SLAVES = 7; private final static int PRIMARY = Position.PRIMARY.ordinal(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRestartCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRestartCluster.java index 9809090003b..b871bb12ab2 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRestartCluster.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRestartCluster.java @@ -25,8 +25,6 @@ import static org.junit.Assert.assertTrue; import java.util.List; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.MetaTableAccessor; @@ -44,10 +42,12 @@ import org.apache.hadoop.hbase.util.Threads; import org.junit.After; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({MasterTests.class, LargeTests.class}) public class TestRestartCluster { - private static final Log LOG = LogFactory.getLog(TestRestartCluster.class); + private static final Logger LOG = LoggerFactory.getLogger(TestRestartCluster.class); private HBaseTestingUtility UTIL = new HBaseTestingUtility(); private static final TableName[] TABLES = { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRollingRestart.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRollingRestart.java index 89feadf8ce8..8953147b766 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRollingRestart.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRollingRestart.java @@ -26,8 +26,6 @@ import java.util.NavigableSet; import java.util.Set; import java.util.TreeSet; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseTestingUtility; @@ -46,7 +44,8 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; - +import 
org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; /** @@ -54,7 +53,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; */ @Category({MasterTests.class, LargeTests.class}) public class TestRollingRestart { - private static final Log LOG = LogFactory.getLog(TestRollingRestart.class); + private static final Logger LOG = LoggerFactory.getLogger(TestRollingRestart.class); @Rule public TestName name = new TestName(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSplitLogManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSplitLogManager.java index d74b7320da9..cd5239edbe4 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSplitLogManager.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSplitLogManager.java @@ -40,8 +40,6 @@ import java.util.Map; import java.util.UUID; import java.util.concurrent.atomic.LongAdder; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -62,8 +60,6 @@ import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.zookeeper.ZKSplitLog; import org.apache.hadoop.hbase.zookeeper.ZKUtil; import org.apache.hadoop.hbase.zookeeper.ZKWatcher; -import org.apache.log4j.Level; -import org.apache.log4j.Logger; import org.apache.zookeeper.CreateMode; import org.apache.zookeeper.KeeperException; import org.apache.zookeeper.ZooDefs.Ids; @@ -73,17 +69,15 @@ import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; import org.mockito.Mockito; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({MasterTests.class, MediumTests.class}) public class TestSplitLogManager { - private static final Log LOG = LogFactory.getLog(TestSplitLogManager.class); + private static final Logger LOG = LoggerFactory.getLogger(TestSplitLogManager.class); private final ServerManager sm = Mockito.mock(ServerManager.class); - static { - Logger.getLogger("org.apache.hadoop.hbase").setLevel(Level.DEBUG); - } - private ZKWatcher zkw; private DummyMasterServices master; private SplitLogManager slm; @@ -540,7 +534,7 @@ public class TestSplitLogManager { try { ZKUtil.setData(zkw, entry.getKey(), slt.toByteArray()); } catch (KeeperException e) { - LOG.warn(e); + LOG.warn(e.toString(), e); encounteredZKException = true; } if (!encounteredZKException) { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestWarmupRegion.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestWarmupRegion.java index a3de52d500a..5cb82a27e22 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestWarmupRegion.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestWarmupRegion.java @@ -23,8 +23,6 @@ import static org.junit.Assert.assertTrue; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HTableDescriptor; @@ -46,6 +44,8 @@ import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Run tests that use the HBase clients; {@link 
org.apache.hadoop.hbase.client.HTable}. @@ -55,7 +55,7 @@ import org.junit.experimental.categories.Category; @Category({MasterTests.class, LargeTests.class}) @SuppressWarnings ("deprecation") public class TestWarmupRegion { - private static final Log LOG = LogFactory.getLog(TestWarmupRegion.class); + private static final Logger LOG = LoggerFactory.getLogger(TestWarmupRegion.class); protected TableName TABLENAME = TableName.valueOf("testPurgeFutureDeletes"); protected final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private static byte [] ROW = Bytes.toBytes("testRow"); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/AssignmentTestingUtil.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/AssignmentTestingUtil.java index 459e4ee5fa0..3799d739181 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/AssignmentTestingUtil.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/AssignmentTestingUtil.java @@ -21,8 +21,6 @@ import static org.junit.Assert.assertEquals; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.Waiter.ExplainingPredicate; @@ -31,11 +29,13 @@ import org.apache.hadoop.hbase.master.HMaster; import org.apache.hadoop.hbase.util.Threads; import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceStability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @InterfaceAudience.Private @InterfaceStability.Evolving public abstract class AssignmentTestingUtil { - private static final Log LOG = LogFactory.getLog(AssignmentTestingUtil.class); + private static final Logger LOG = LoggerFactory.getLogger(AssignmentTestingUtil.class); private AssignmentTestingUtil() {} diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestAssignmentManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestAssignmentManager.java index f4365eac879..3c453bcf4c5 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestAssignmentManager.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestAssignmentManager.java @@ -37,8 +37,6 @@ import java.util.concurrent.Future; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.CategoryBasedTimeout; import org.apache.hadoop.hbase.DoNotRetryIOException; @@ -54,7 +52,6 @@ import org.apache.hadoop.hbase.ipc.ServerNotRunningYetException; import org.apache.hadoop.hbase.master.MasterServices; import org.apache.hadoop.hbase.master.RegionState.State; import org.apache.hadoop.hbase.master.procedure.MasterProcedureConstants; -import org.apache.hadoop.hbase.master.procedure.MasterProcedureScheduler; import org.apache.hadoop.hbase.master.procedure.ProcedureSyncWait; import org.apache.hadoop.hbase.master.procedure.RSProcedureDispatcher; import org.apache.hadoop.hbase.procedure2.Procedure; @@ -69,8 +66,6 @@ import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.FSUtils; import org.apache.hadoop.ipc.RemoteException; -import 
org.apache.log4j.Level; -import org.apache.log4j.Logger; import org.junit.After; import org.junit.Before; import org.junit.Ignore; @@ -80,7 +75,8 @@ import org.junit.experimental.categories.Category; import org.junit.rules.ExpectedException; import org.junit.rules.TestName; import org.junit.rules.TestRule; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequest; import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionResponse; @@ -96,10 +92,8 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProto @Category({MasterTests.class, MediumTests.class}) public class TestAssignmentManager { - private static final Log LOG = LogFactory.getLog(TestAssignmentManager.class); - static { - Logger.getLogger(MasterProcedureScheduler.class).setLevel(Level.TRACE); - } + private static final Logger LOG = LoggerFactory.getLogger(TestAssignmentManager.class); + @Rule public TestName name = new TestName(); @Rule public final TestRule timeout = CategoryBasedTimeout.builder().withTimeout(this.getClass()). @@ -534,6 +528,7 @@ public class TestAssignmentManager { } private class NoopRsExecutor implements MockRSExecutor { + @Override public ExecuteProceduresResponse sendRequest(ServerName server, ExecuteProceduresRequest request) throws IOException { ExecuteProceduresResponse.Builder builder = ExecuteProceduresResponse.newBuilder(); @@ -602,6 +597,7 @@ public class TestAssignmentManager { } private static class ServerNotYetRunningRsExecutor implements MockRSExecutor { + @Override public ExecuteProceduresResponse sendRequest(ServerName server, ExecuteProceduresRequest req) throws IOException { throw new ServerNotRunningYetException("wait on server startup"); @@ -615,6 +611,7 @@ public class TestAssignmentManager { this.exception = exception; } + @Override public ExecuteProceduresResponse sendRequest(ServerName server, ExecuteProceduresRequest req) throws IOException { throw exception; @@ -634,6 +631,7 @@ public class TestAssignmentManager { this.maxSocketTimeoutRetries = maxSocketTimeoutRetries; } + @Override public ExecuteProceduresResponse sendRequest(ServerName server, ExecuteProceduresRequest req) throws IOException { // SocketTimeoutException should be a temporary problem @@ -746,6 +744,7 @@ public class TestAssignmentManager { private class RandRsExecutor extends NoopRsExecutor { private final Random rand = new Random(); + @Override public ExecuteProceduresResponse sendRequest(ServerName server, ExecuteProceduresRequest req) throws IOException { switch (rand.nextInt(5)) { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestAssignmentOnRSCrash.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestAssignmentOnRSCrash.java index 72fb7ad12e6..d33eab7cabe 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestAssignmentOnRSCrash.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestAssignmentOnRSCrash.java @@ -24,8 +24,6 @@ import static org.junit.Assert.assertTrue; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.ServerName; @@ -43,10 +41,12 @@ import org.junit.After; import org.junit.Before; import 
org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({MasterTests.class, LargeTests.class}) public class TestAssignmentOnRSCrash { - private static final Log LOG = LogFactory.getLog(TestAssignmentOnRSCrash.class); + private static final Logger LOG = LoggerFactory.getLogger(TestAssignmentOnRSCrash.class); private static final TableName TEST_TABLE = TableName.valueOf("testb"); private static final String FAMILY_STR = "f"; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestMergeTableRegionsProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestMergeTableRegionsProcedure.java index beb53eccc96..57882dfb4f6 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestMergeTableRegionsProcedure.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestMergeTableRegionsProcedure.java @@ -23,8 +23,6 @@ import static org.junit.Assert.assertTrue; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.CategoryBasedTimeout; import org.apache.hadoop.hbase.HBaseTestingUtility; @@ -54,10 +52,12 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; import org.junit.rules.TestRule; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({MasterTests.class, MediumTests.class}) public class TestMergeTableRegionsProcedure { - private static final Log LOG = LogFactory.getLog(TestMergeTableRegionsProcedure.class); + private static final Logger LOG = LoggerFactory.getLogger(TestMergeTableRegionsProcedure.class); @Rule public final TestRule timeout = CategoryBasedTimeout.builder(). 
withTimeout(this.getClass()).withLookingForStuckThread(true).build(); @Rule public final TestName name = new TestName(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestRegionStates.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestRegionStates.java index bd131300aee..587ebc07860 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestRegionStates.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestRegionStates.java @@ -28,8 +28,6 @@ import java.util.concurrent.Future; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.RegionInfo; @@ -45,10 +43,12 @@ import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({MasterTests.class, MediumTests.class}) public class TestRegionStates { - private static final Log LOG = LogFactory.getLog(TestRegionStates.class); + private static final Logger LOG = LoggerFactory.getLogger(TestRegionStates.class); protected static final HBaseTestingUtility UTIL = new HBaseTestingUtility(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestRogueRSAssignment.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestRogueRSAssignment.java index 49eb5735f62..056e66d3e6e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestRogueRSAssignment.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestRogueRSAssignment.java @@ -18,8 +18,6 @@ */ package org.apache.hadoop.hbase.master.assignment; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HRegionInfo; @@ -51,6 +49,8 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.ExpectedException; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.io.IOException; import java.util.List; @@ -64,7 +64,7 @@ import static org.junit.Assert.assertNotNull; */ @Category({MasterTests.class, MediumTests.class}) public class TestRogueRSAssignment { - private static final Log LOG = LogFactory.getLog(TestRogueRSAssignment.class); + private static final Logger LOG = LoggerFactory.getLogger(TestRogueRSAssignment.class); @Rule public final TestName name = new TestName(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestSplitTableRegionProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestSplitTableRegionProcedure.java index 1cdd9c59fce..37d982082e0 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestSplitTableRegionProcedure.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestSplitTableRegionProcedure.java @@ -25,8 +25,6 @@ import static org.junit.Assert.fail; import java.io.IOException; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import 
org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.CategoryBasedTimeout; import org.apache.hadoop.hbase.Cell; @@ -63,10 +61,12 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; import org.junit.rules.TestRule; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({MasterTests.class, MediumTests.class}) public class TestSplitTableRegionProcedure { - private static final Log LOG = LogFactory.getLog(TestSplitTableRegionProcedure.class); + private static final Logger LOG = LoggerFactory.getLogger(TestSplitTableRegionProcedure.class); @Rule public final TestRule timeout = CategoryBasedTimeout.builder(). withTimeout(this.getClass()).withLookingForStuckThread(true).build(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/BalancerTestBase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/BalancerTestBase.java index ee2e433fc03..adf56b8fe89 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/BalancerTestBase.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/BalancerTestBase.java @@ -35,8 +35,6 @@ import java.util.SortedSet; import java.util.TreeMap; import java.util.TreeSet; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.ServerName; @@ -50,6 +48,8 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.net.DNSToSwitchMapping; import org.junit.Assert; import org.junit.BeforeClass; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Class used to be the base of unit tests on load balancers. 
It gives helper @@ -58,7 +58,7 @@ import org.junit.BeforeClass; * */ public class BalancerTestBase { - private static final Log LOG = LogFactory.getLog(BalancerTestBase.class); + private static final Logger LOG = LoggerFactory.getLogger(BalancerTestBase.class); protected static Random rand = new Random(); static int regionId = 0; protected static Configuration conf; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/LoadBalancerPerformanceEvaluation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/LoadBalancerPerformanceEvaluation.java index 2b40ea73629..2aaa3af20ca 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/LoadBalancerPerformanceEvaluation.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/LoadBalancerPerformanceEvaluation.java @@ -28,8 +28,6 @@ import java.util.concurrent.TimeUnit; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.Option; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HBaseCommonTestingUtility; import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.HConstants; @@ -41,7 +39,8 @@ import org.apache.hadoop.hbase.master.LoadBalancer; import org.apache.hadoop.hbase.util.AbstractHBaseTool; import org.apache.hadoop.hbase.util.Bytes; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; import org.apache.hadoop.hbase.shaded.com.google.common.base.Stopwatch; @@ -55,8 +54,8 @@ import org.apache.hadoop.hbase.shaded.com.google.common.base.Stopwatch; */ @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS) public class LoadBalancerPerformanceEvaluation extends AbstractHBaseTool { - private static final Log LOG = - LogFactory.getLog(LoadBalancerPerformanceEvaluation.class.getName()); + private static final Logger LOG = + LoggerFactory.getLogger(LoadBalancerPerformanceEvaluation.class.getName()); protected static final HBaseCommonTestingUtility UTIL = new HBaseCommonTestingUtility(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestBaseLoadBalancer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestBaseLoadBalancer.java index c33cd56e4ae..b7fe71f5d39 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestBaseLoadBalancer.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestBaseLoadBalancer.java @@ -35,8 +35,6 @@ import java.util.TreeSet; import java.util.stream.Collectors; import org.apache.commons.lang3.ArrayUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseIOException; @@ -61,14 +59,15 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; import org.mockito.Mockito; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; @Category({MasterTests.class, MediumTests.class}) public class TestBaseLoadBalancer extends BalancerTestBase { private static LoadBalancer loadBalancer; - private static final Log LOG = LogFactory.getLog(TestBaseLoadBalancer.class); + private static final Logger LOG 
= LoggerFactory.getLogger(TestBaseLoadBalancer.class); private static final ServerName master = ServerName.valueOf("fake-master", 0, 1L); private static RackManager rackManager; private static final int NUM_SERVERS = 15; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestDefaultLoadBalancer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestDefaultLoadBalancer.java index 4d09bf8147a..4b500c96198 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestDefaultLoadBalancer.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestDefaultLoadBalancer.java @@ -25,8 +25,6 @@ import java.util.List; import java.util.Map; import java.util.TreeMap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.ServerName; @@ -43,13 +41,15 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Test the load balancer that is created by default. */ @Category({MasterTests.class, MediumTests.class}) public class TestDefaultLoadBalancer extends BalancerTestBase { - private static final Log LOG = LogFactory.getLog(TestDefaultLoadBalancer.class); + private static final Logger LOG = LoggerFactory.getLogger(TestDefaultLoadBalancer.class); private static LoadBalancer loadBalancer; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestFavoredNodeTableImport.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestFavoredNodeTableImport.java index 3f39f98bb33..cec2968e57c 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestFavoredNodeTableImport.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestFavoredNodeTableImport.java @@ -25,8 +25,6 @@ import static org.junit.Assert.assertEquals; import java.util.List; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HColumnDescriptor; @@ -45,6 +43,8 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; import org.junit.After; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /* * This case tests a scenario when a cluster with tables is moved from Stochastic Load Balancer @@ -53,7 +53,7 @@ import org.junit.experimental.categories.Category; @Category(MediumTests.class) public class TestFavoredNodeTableImport { - private static final Log LOG = LogFactory.getLog(TestFavoredNodeTableImport.class); + private static final Logger LOG = LoggerFactory.getLogger(TestFavoredNodeTableImport.class); private static final int SLAVES = 3; private static final int REGION_NUM = 20; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestFavoredStochasticBalancerPickers.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestFavoredStochasticBalancerPickers.java index 5a0951997d2..fb0dea78275 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestFavoredStochasticBalancerPickers.java +++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestFavoredStochasticBalancerPickers.java @@ -28,8 +28,6 @@ import java.util.EnumSet; import java.util.List; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.ClusterStatus; import org.apache.hadoop.hbase.ClusterStatus.Option; @@ -59,6 +57,8 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.JVMClusterUtil; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.junit.After; import org.junit.Before; import org.junit.BeforeClass; @@ -71,7 +71,8 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps; @Category(LargeTests.class) public class TestFavoredStochasticBalancerPickers extends BalancerTestBase { - private static final Log LOG = LogFactory.getLog(TestFavoredStochasticBalancerPickers.class); + private static final Logger LOG = + LoggerFactory.getLogger(TestFavoredStochasticBalancerPickers.class); private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private static final int SLAVES = 6; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestFavoredStochasticLoadBalancer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestFavoredStochasticLoadBalancer.java index d6f559f17dd..6541f15c592 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestFavoredStochasticLoadBalancer.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestFavoredStochasticLoadBalancer.java @@ -31,8 +31,6 @@ import java.util.Map; import java.util.Map.Entry; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.ClusterStatus.Option; import org.apache.hadoop.hbase.HBaseTestingUtility; @@ -54,7 +52,6 @@ import org.apache.hadoop.hbase.master.ServerManager; import org.apache.hadoop.hbase.master.assignment.RegionStates; import org.apache.hadoop.hbase.master.assignment.RegionStates.RegionStateNode; import org.apache.hadoop.hbase.regionserver.HRegion; -import org.apache.hadoop.hbase.regionserver.Region; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.JVMClusterUtil; @@ -64,7 +61,8 @@ import org.junit.BeforeClass; import org.junit.Ignore; import org.junit.Test; import org.junit.experimental.categories.Category; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; @@ -72,7 +70,8 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; @Category(MediumTests.class) public class TestFavoredStochasticLoadBalancer extends BalancerTestBase { - private static final Log LOG = LogFactory.getLog(TestFavoredStochasticLoadBalancer.class); + private static final Logger LOG = + LoggerFactory.getLogger(TestFavoredStochasticLoadBalancer.class); private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private static final int SLAVES = 8; diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestRegionsOnMasterOptions.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestRegionsOnMasterOptions.java index 58c33331dbf..a8e78d7ae97 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestRegionsOnMasterOptions.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestRegionsOnMasterOptions.java @@ -17,8 +17,6 @@ */ package org.apache.hadoop.hbase.master.balancer; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.*; import org.apache.hadoop.hbase.client.Table; @@ -36,6 +34,8 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; import org.junit.rules.TestRule; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.util.List; @@ -49,7 +49,7 @@ import static org.junit.Assert.assertTrue; */ @Category({MediumTests.class}) public class TestRegionsOnMasterOptions { - private static final Log LOG = LogFactory.getLog(TestRegionsOnMasterOptions.class); + private static final Logger LOG = LoggerFactory.getLogger(TestRegionsOnMasterOptions.class); @Rule public TestName name = new TestName(); @Rule public final TestRule timeout = CategoryBasedTimeout.builder().withTimeout(this.getClass()). withLookingForStuckThread(true).build(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestStochasticLoadBalancer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestStochasticLoadBalancer.java index 68d009dfa01..1e6f1dd403a 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestStochasticLoadBalancer.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestStochasticLoadBalancer.java @@ -34,8 +34,6 @@ import java.util.Map.Entry; import java.util.Queue; import java.util.TreeMap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.ClusterStatus; import org.apache.hadoop.hbase.HBaseConfiguration; @@ -55,11 +53,13 @@ import org.apache.hadoop.hbase.util.Bytes; import org.junit.Ignore; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({FlakeyTests.class, MediumTests.class}) public class TestStochasticLoadBalancer extends BalancerTestBase { public static final String REGION_KEY = "testRegion"; - private static final Log LOG = LogFactory.getLog(TestStochasticLoadBalancer.class); + private static final Logger LOG = LoggerFactory.getLogger(TestStochasticLoadBalancer.class); // Mapping of locality test -> expected locality private float[] expectedLocalities = {1.0f, 0.0f, 0.50f, 0.25f, 1.0f}; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestStochasticLoadBalancer2.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestStochasticLoadBalancer2.java index 0b69a4a0527..637c2f6dac1 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestStochasticLoadBalancer2.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestStochasticLoadBalancer2.java @@ -17,8 +17,6 @@ */ package org.apache.hadoop.hbase.master.balancer; -import org.apache.commons.logging.Log; -import 
org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.CategoryBasedTimeout; import org.apache.hadoop.hbase.testclassification.FlakeyTests; import org.apache.hadoop.hbase.testclassification.LargeTests; @@ -28,10 +26,12 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestRule; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({FlakeyTests.class, LargeTests.class}) public class TestStochasticLoadBalancer2 extends BalancerTestBase { - private static final Log LOG = LogFactory.getLog(TestStochasticLoadBalancer2.class); + private static final Logger LOG = LoggerFactory.getLogger(TestStochasticLoadBalancer2.class); @Rule public final TestRule timeout = CategoryBasedTimeout.builder() .withTimeout(this.getClass()) diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestCleanerChore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestCleanerChore.java index 39bdbc7e6aa..a9a856b9058 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestCleanerChore.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestCleanerChore.java @@ -24,8 +24,6 @@ import static org.junit.Assert.assertTrue; import java.io.IOException; import java.util.Random; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileStatus; @@ -43,11 +41,13 @@ import org.junit.experimental.categories.Category; import org.mockito.Mockito; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({MasterTests.class, SmallTests.class}) public class TestCleanerChore { - private static final Log LOG = LogFactory.getLog(TestCleanerChore.class); + private static final Logger LOG = LoggerFactory.getLogger(TestCleanerChore.class); private static final HBaseTestingUtility UTIL = new HBaseTestingUtility(); @After diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestHFileCleaner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestHFileCleaner.java index a08b0c7ee3b..45204993212 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestHFileCleaner.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestHFileCleaner.java @@ -25,8 +25,6 @@ import java.io.IOException; import java.util.List; import java.util.Random; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileStatus; @@ -51,10 +49,12 @@ import org.junit.Assert; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({MasterTests.class, MediumTests.class}) public class TestHFileCleaner { - private static final Log LOG = LogFactory.getLog(TestHFileCleaner.class); + private static final Logger LOG = LoggerFactory.getLogger(TestHFileCleaner.class); private final static HBaseTestingUtility UTIL = new HBaseTestingUtility(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestLogsCleaner.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestLogsCleaner.java index 34e81db4284..43fc6a4f339 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestLogsCleaner.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestLogsCleaner.java @@ -31,8 +31,6 @@ import java.util.LinkedList; import java.util.List; import java.util.Random; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import org.apache.hadoop.conf.Configuration; @@ -68,11 +66,13 @@ import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; import org.mockito.Mockito; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({MasterTests.class, MediumTests.class}) public class TestLogsCleaner { - private static final Log LOG = LogFactory.getLog(TestLogsCleaner.class); + private static final Logger LOG = LoggerFactory.getLogger(TestLogsCleaner.class); private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); @BeforeClass diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestReplicationHFileCleaner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestReplicationHFileCleaner.java index 37c9221fd42..2f233016a74 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestReplicationHFileCleaner.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestReplicationHFileCleaner.java @@ -25,8 +25,6 @@ import java.util.ArrayList; import java.util.Iterator; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -66,10 +64,12 @@ import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; import org.mockito.Mockito; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({ MasterTests.class, SmallTests.class }) public class TestReplicationHFileCleaner { - private static final Log LOG = LogFactory.getLog(ReplicationQueuesZKImpl.class); + private static final Logger LOG = LoggerFactory.getLogger(ReplicationQueuesZKImpl.class); private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private static Server server; private static ReplicationQueues rq; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestSnapshotFromMaster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestSnapshotFromMaster.java index 756152ed412..9e76876fc2e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestSnapshotFromMaster.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestSnapshotFromMaster.java @@ -26,8 +26,6 @@ import java.util.Collection; import java.util.List; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -67,7 +65,8 @@ import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; import org.mockito.Mockito; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; 
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder; import org.apache.hadoop.hbase.client.TableDescriptor; import org.apache.hadoop.hbase.client.TableDescriptorBuilder; @@ -79,7 +78,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; @Category({MasterTests.class, MediumTests.class}) public class TestSnapshotFromMaster { - private static final Log LOG = LogFactory.getLog(TestSnapshotFromMaster.class); + private static final Logger LOG = LoggerFactory.getLogger(TestSnapshotFromMaster.class); private static final HBaseTestingUtility UTIL = new HBaseTestingUtility(); private static final int NUM_RS = 2; private static Path rootDir; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/locking/TestLockManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/locking/TestLockManager.java index e2e97dc033f..80a9b7b2484 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/locking/TestLockManager.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/locking/TestLockManager.java @@ -24,8 +24,6 @@ import static org.junit.Assert.assertTrue; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HRegionInfo; @@ -47,6 +45,8 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({MasterTests.class, SmallTests.class}) public class TestLockManager { @@ -55,7 +55,7 @@ public class TestLockManager { // crank this up if this test turns out to be flaky. 
private static final int LOCAL_LOCKS_TIMEOUT = 1000; - private static final Log LOG = LogFactory.getLog(TestLockProcedure.class); + private static final Logger LOG = LoggerFactory.getLogger(TestLockProcedure.class); protected static final HBaseTestingUtility UTIL = new HBaseTestingUtility(); private static MasterServices masterServices; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/locking/TestLockProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/locking/TestLockProcedure.java index a817bd5b379..28c48d974b0 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/locking/TestLockProcedure.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/locking/TestLockProcedure.java @@ -28,8 +28,6 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.CategoryBasedTimeout; import org.apache.hadoop.hbase.DoNotRetryIOException; @@ -60,7 +58,8 @@ import org.junit.experimental.categories.Category; import org.junit.rules.ExpectedException; import org.junit.rules.TestName; import org.junit.rules.TestRule; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos; import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest; @@ -81,7 +80,7 @@ public class TestLockProcedure { private static final int HEARTBEAT_TIMEOUT = 2000; private static final int LOCAL_LOCKS_TIMEOUT = 4000; - private static final Log LOG = LogFactory.getLog(TestLockProcedure.class); + private static final Logger LOG = LoggerFactory.getLogger(TestLockProcedure.class); protected static final HBaseTestingUtility UTIL = new HBaseTestingUtility(); private static MasterRpcServices masterRpcService; private static ProcedureExecutor procExec; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/normalizer/TestSimpleRegionNormalizer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/normalizer/TestSimpleRegionNormalizer.java index 0936c1643cd..0a8d74909c9 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/normalizer/TestSimpleRegionNormalizer.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/normalizer/TestSimpleRegionNormalizer.java @@ -29,8 +29,6 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HBaseIOException; import org.apache.hadoop.hbase.RegionLoad; import org.apache.hadoop.hbase.ServerName; @@ -50,6 +48,8 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; import org.mockito.Mockito; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** @@ -57,7 +57,7 @@ import org.mockito.Mockito; */ @Category({MasterTests.class, SmallTests.class}) public class TestSimpleRegionNormalizer { - private static final Log LOG = LogFactory.getLog(TestSimpleRegionNormalizer.class); + private static final Logger LOG = LoggerFactory.getLogger(TestSimpleRegionNormalizer.class); private static RegionNormalizer normalizer; diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/normalizer/TestSimpleRegionNormalizerOnCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/normalizer/TestSimpleRegionNormalizerOnCluster.java index 8fe53af3d5c..5236c0465be 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/normalizer/TestSimpleRegionNormalizerOnCluster.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/normalizer/TestSimpleRegionNormalizerOnCluster.java @@ -25,8 +25,6 @@ import java.util.Collections; import java.util.Comparator; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HTableDescriptor; @@ -55,13 +53,16 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Testing {@link SimpleRegionNormalizer} on minicluster. */ @Category({MasterTests.class, MediumTests.class}) public class TestSimpleRegionNormalizerOnCluster { - private static final Log LOG = LogFactory.getLog(TestSimpleRegionNormalizerOnCluster.class); + private static final Logger LOG = + LoggerFactory.getLogger(TestSimpleRegionNormalizerOnCluster.class); private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private static final byte[] FAMILYNAME = Bytes.toBytes("fam"); private static Admin admin; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureTestingUtility.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureTestingUtility.java index c68c01ffb48..243bb1487d5 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureTestingUtility.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureTestingUtility.java @@ -28,8 +28,6 @@ import java.util.TreeSet; import java.util.concurrent.Callable; import java.util.concurrent.atomic.AtomicInteger; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HBaseTestingUtility; @@ -63,10 +61,12 @@ import org.apache.hadoop.hbase.util.FSUtils; import org.apache.hadoop.hbase.util.MD5Hash; import org.apache.hadoop.hbase.util.ModifyRegionUtils; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @InterfaceAudience.Private public class MasterProcedureTestingUtility { - private static final Log LOG = LogFactory.getLog(MasterProcedureTestingUtility.class); + private static final Logger LOG = LoggerFactory.getLogger(MasterProcedureTestingUtility.class); private MasterProcedureTestingUtility() { } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestCloneSnapshotProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestCloneSnapshotProcedure.java index b68a2f747a7..490501a866f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestCloneSnapshotProcedure.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestCloneSnapshotProcedure.java @@ -22,8 +22,6 @@ import static org.junit.Assert.assertTrue; import java.util.List; -import 
org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.TableExistsException; @@ -42,10 +40,12 @@ import org.apache.hadoop.hbase.util.Bytes; import org.junit.After; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({MasterTests.class, MediumTests.class}) public class TestCloneSnapshotProcedure extends TestTableDDLProcedureBase { - private static final Log LOG = LogFactory.getLog(TestCloneSnapshotProcedure.class); + private static final Logger LOG = LoggerFactory.getLogger(TestCloneSnapshotProcedure.class); protected final byte[] CF = Bytes.toBytes("cf1"); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestCreateNamespaceProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestCreateNamespaceProcedure.java index 81942e1a9e4..32a757e6345 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestCreateNamespaceProcedure.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestCreateNamespaceProcedure.java @@ -24,8 +24,6 @@ import static org.junit.Assert.assertTrue; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.NamespaceDescriptor; @@ -43,10 +41,12 @@ import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({MasterTests.class, MediumTests.class}) public class TestCreateNamespaceProcedure { - private static final Log LOG = LogFactory.getLog(TestCreateNamespaceProcedure.class); + private static final Logger LOG = LoggerFactory.getLogger(TestCreateNamespaceProcedure.class); protected static final HBaseTestingUtility UTIL = new HBaseTestingUtility(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestCreateTableProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestCreateTableProcedure.java index c57f210be26..e54eb661623 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestCreateTableProcedure.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestCreateTableProcedure.java @@ -21,8 +21,6 @@ package org.apache.hadoop.hbase.master.procedure; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.TableExistsException; import org.apache.hadoop.hbase.TableName; @@ -41,10 +39,12 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({MasterTests.class, MediumTests.class}) public class TestCreateTableProcedure extends TestTableDDLProcedureBase { - private static final Log LOG = LogFactory.getLog(TestCreateTableProcedure.class); + private static final Logger LOG = LoggerFactory.getLogger(TestCreateTableProcedure.class); private static final 
String F1 = "f1"; private static final String F2 = "f2"; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDeleteNamespaceProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDeleteNamespaceProcedure.java index dcfed2991e7..fa2507e926f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDeleteNamespaceProcedure.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDeleteNamespaceProcedure.java @@ -24,8 +24,6 @@ import static org.junit.Assert.assertTrue; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HTableDescriptor; @@ -46,10 +44,12 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({MasterTests.class, MediumTests.class}) public class TestDeleteNamespaceProcedure { - private static final Log LOG = LogFactory.getLog(TestDeleteNamespaceProcedure.class); + private static final Logger LOG = LoggerFactory.getLogger(TestDeleteNamespaceProcedure.class); protected static final HBaseTestingUtility UTIL = new HBaseTestingUtility(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDeleteTableProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDeleteTableProcedure.java index 6f109e516a6..9f4c1803f44 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDeleteTableProcedure.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDeleteTableProcedure.java @@ -20,8 +20,6 @@ package org.apache.hadoop.hbase.master.procedure; import static org.junit.Assert.assertTrue; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.CategoryBasedTimeout; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.TableNotDisabledException; @@ -38,10 +36,12 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; import org.junit.rules.TestRule; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({MasterTests.class, MediumTests.class}) public class TestDeleteTableProcedure extends TestTableDDLProcedureBase { - private static final Log LOG = LogFactory.getLog(TestDeleteTableProcedure.class); + private static final Logger LOG = LoggerFactory.getLogger(TestDeleteTableProcedure.class); @Rule public final TestRule timeout = CategoryBasedTimeout.builder().withTimeout(this.getClass()). 
withLookingForStuckThread(true).build(); @Rule public TestName name = new TestName(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDisableTableProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDisableTableProcedure.java index 9c5970cbbca..e49bfcbbce8 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDisableTableProcedure.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDisableTableProcedure.java @@ -20,8 +20,6 @@ package org.apache.hadoop.hbase.master.procedure; import static org.junit.Assert.assertTrue; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.CategoryBasedTimeout; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.TableNotEnabledException; @@ -37,10 +35,12 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; import org.junit.rules.TestRule; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({MasterTests.class, MediumTests.class}) public class TestDisableTableProcedure extends TestTableDDLProcedureBase { - private static final Log LOG = LogFactory.getLog(TestDisableTableProcedure.class); + private static final Logger LOG = LoggerFactory.getLogger(TestDisableTableProcedure.class); @Rule public final TestRule timeout = CategoryBasedTimeout.builder().withTimeout(this.getClass()). withLookingForStuckThread(true).build(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestEnableTableProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestEnableTableProcedure.java index 66071d3fd04..8364dfea5d4 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestEnableTableProcedure.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestEnableTableProcedure.java @@ -20,8 +20,6 @@ package org.apache.hadoop.hbase.master.procedure; import static org.junit.Assert.assertTrue; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.CategoryBasedTimeout; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.TableNotDisabledException; @@ -37,10 +35,12 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; import org.junit.rules.TestRule; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({MasterTests.class, MediumTests.class}) public class TestEnableTableProcedure extends TestTableDDLProcedureBase { - private static final Log LOG = LogFactory.getLog(TestEnableTableProcedure.class); + private static final Logger LOG = LoggerFactory.getLogger(TestEnableTableProcedure.class); @Rule public final TestRule timeout = CategoryBasedTimeout.builder().withTimeout(this.getClass()). 
withLookingForStuckThread(true).build(); @Rule public TestName name = new TestName(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterFailoverWithProcedures.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterFailoverWithProcedures.java index 3eeb382ad52..31faa083b9e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterFailoverWithProcedures.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterFailoverWithProcedures.java @@ -20,8 +20,6 @@ package org.apache.hadoop.hbase.master.procedure; import static org.junit.Assert.assertEquals; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.CategoryBasedTimeout; @@ -44,7 +42,8 @@ import org.junit.ClassRule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestRule; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.CreateTableState; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DeleteTableState; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DisableTableState; @@ -53,7 +52,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.T @Category({MasterTests.class, LargeTests.class}) public class TestMasterFailoverWithProcedures { - private static final Log LOG = LogFactory.getLog(TestMasterFailoverWithProcedures.class); + private static final Logger LOG = LoggerFactory.getLogger(TestMasterFailoverWithProcedures.class); @ClassRule diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureEvents.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureEvents.java index b0a598ef0d1..3cb5e646a0e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureEvents.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureEvents.java @@ -20,8 +20,6 @@ package org.apache.hadoop.hbase.master.procedure; import static org.junit.Assert.assertEquals; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HColumnDescriptor; @@ -45,10 +43,12 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({MasterTests.class, MediumTests.class}) public class TestMasterProcedureEvents { - private static final Log LOG = LogFactory.getLog(TestCreateTableProcedure.class); + private static final Logger LOG = LoggerFactory.getLogger(TestCreateTableProcedure.class); protected static final HBaseTestingUtility UTIL = new HBaseTestingUtility(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureScheduler.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureScheduler.java index d971b5fb26a..0291165848e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureScheduler.java +++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureScheduler.java @@ -24,8 +24,7 @@ import java.lang.reflect.Field; import java.lang.reflect.Method; import java.util.Arrays; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; + import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HRegionInfo; @@ -47,10 +46,12 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({MasterTests.class, SmallTests.class}) public class TestMasterProcedureScheduler { - private static final Log LOG = LogFactory.getLog(TestMasterProcedureScheduler.class); + private static final Logger LOG = LoggerFactory.getLogger(TestMasterProcedureScheduler.class); private MasterProcedureScheduler queue; private Configuration conf; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureSchedulerConcurrency.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureSchedulerConcurrency.java index af48f641eaa..2e8e52ae24e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureSchedulerConcurrency.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureSchedulerConcurrency.java @@ -23,8 +23,6 @@ import java.util.ArrayList; import java.util.HashSet; import java.util.concurrent.atomic.AtomicInteger; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.TableName; @@ -37,13 +35,16 @@ import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; @Category({MasterTests.class, MediumTests.class}) public class TestMasterProcedureSchedulerConcurrency { - private static final Log LOG = LogFactory.getLog(TestMasterProcedureSchedulerConcurrency.class); + private static final Logger LOG = + LoggerFactory.getLogger(TestMasterProcedureSchedulerConcurrency.class); private MasterProcedureScheduler queue; private Configuration conf; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureWalLease.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureWalLease.java index e0452c2f2a3..ff2303acc30 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureWalLease.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureWalLease.java @@ -25,8 +25,6 @@ import static org.junit.Assert.fail; import java.io.IOException; import java.util.concurrent.CountDownLatch; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.CategoryBasedTimeout; import org.apache.hadoop.hbase.HBaseTestingUtility; @@ -52,11 +50,13 @@ import org.junit.experimental.categories.Category; import org.junit.rules.TestName; import org.junit.rules.TestRule; import org.mockito.Mockito; 
+import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({MasterTests.class, LargeTests.class}) @Ignore public class TestMasterProcedureWalLease { - private static final Log LOG = LogFactory.getLog(TestMasterProcedureWalLease.class); + private static final Logger LOG = LoggerFactory.getLogger(TestMasterProcedureWalLease.class); @Rule public TestName name = new TestName(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestModifyNamespaceProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestModifyNamespaceProcedure.java index c1b28967b4f..8dec59d0e82 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestModifyNamespaceProcedure.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestModifyNamespaceProcedure.java @@ -23,8 +23,6 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HTableDescriptor; @@ -42,10 +40,12 @@ import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({MasterTests.class, MediumTests.class}) public class TestModifyNamespaceProcedure { - private static final Log LOG = LogFactory.getLog(TestModifyNamespaceProcedure.class); + private static final Logger LOG = LoggerFactory.getLogger(TestModifyNamespaceProcedure.class); protected static final HBaseTestingUtility UTIL = new HBaseTestingUtility(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestProcedureAdmin.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestProcedureAdmin.java index bb531ce0c20..f47654f7ea3 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestProcedureAdmin.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestProcedureAdmin.java @@ -24,8 +24,6 @@ import static org.junit.Assert.assertTrue; import java.util.List; import java.util.Random; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.CategoryBasedTimeout; import org.apache.hadoop.hbase.HBaseTestingUtility; @@ -46,10 +44,12 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; import org.junit.rules.TestRule; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({MasterTests.class, MediumTests.class}) public class TestProcedureAdmin { - private static final Log LOG = LogFactory.getLog(TestProcedureAdmin.class); + private static final Logger LOG = LoggerFactory.getLogger(TestProcedureAdmin.class); @Rule public final TestRule timeout = CategoryBasedTimeout.builder().withTimeout(this.getClass()). 
withLookingForStuckThread(true).build(); @Rule public TestName name = new TestName(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestRestoreSnapshotProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestRestoreSnapshotProcedure.java index 13146f72b20..3946ee99276 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestRestoreSnapshotProcedure.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestRestoreSnapshotProcedure.java @@ -26,8 +26,6 @@ import static org.junit.Assert.assertTrue; import java.io.IOException; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.CategoryBasedTimeout; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HTableDescriptor; @@ -52,10 +50,12 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; import org.junit.rules.TestRule; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({MasterTests.class, MediumTests.class}) public class TestRestoreSnapshotProcedure extends TestTableDDLProcedureBase { - private static final Log LOG = LogFactory.getLog(TestRestoreSnapshotProcedure.class); + private static final Logger LOG = LoggerFactory.getLogger(TestRestoreSnapshotProcedure.class); @Rule public final TestRule timeout = CategoryBasedTimeout.builder().withTimeout(this.getClass()). withLookingForStuckThread(true).build(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestSafemodeBringsDownMaster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestSafemodeBringsDownMaster.java index 08070adf9f5..389ed62e6b0 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestSafemodeBringsDownMaster.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestSafemodeBringsDownMaster.java @@ -22,8 +22,6 @@ import static org.junit.Assert.assertTrue; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.TableName; @@ -44,10 +42,12 @@ import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category(MediumTests.class) public class TestSafemodeBringsDownMaster { - private static final Log LOG = LogFactory.getLog(TestSafemodeBringsDownMaster.class); + private static final Logger LOG = LoggerFactory.getLogger(TestSafemodeBringsDownMaster.class); protected static final HBaseTestingUtility UTIL = new HBaseTestingUtility(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestServerCrashProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestServerCrashProcedure.java index 627cbe8ffd4..c1d1812c4c6 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestServerCrashProcedure.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestServerCrashProcedure.java @@ -21,8 +21,6 @@ package org.apache.hadoop.hbase.master.procedure; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; -import 
org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HRegionInfo; @@ -41,10 +39,12 @@ import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({MasterTests.class, LargeTests.class}) public class TestServerCrashProcedure { - private static final Log LOG = LogFactory.getLog(TestServerCrashProcedure.class); + private static final Logger LOG = LoggerFactory.getLogger(TestServerCrashProcedure.class); private HBaseTestingUtility util; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestTableDDLProcedureBase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestTableDDLProcedureBase.java index f7b4100b526..f7cf6403798 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestTableDDLProcedureBase.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestTableDDLProcedureBase.java @@ -18,8 +18,6 @@ package org.apache.hadoop.hbase.master.procedure; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HTableDescriptor; @@ -30,10 +28,13 @@ import org.junit.After; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import static org.junit.Assert.assertTrue; public abstract class TestTableDDLProcedureBase { - private static final Log LOG = LogFactory.getLog(TestTableDDLProcedureBase.class); + private static final Logger LOG = LoggerFactory.getLogger(TestTableDDLProcedureBase.class); protected static final HBaseTestingUtility UTIL = new HBaseTestingUtility(); private static void setupConf(Configuration conf) { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestTruncateTableProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestTruncateTableProcedure.java index 2368af55619..b70bdd554ae 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestTruncateTableProcedure.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestTruncateTableProcedure.java @@ -21,8 +21,6 @@ package org.apache.hadoop.hbase.master.procedure; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.CategoryBasedTimeout; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.TableNotDisabledException; @@ -39,10 +37,12 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; import org.junit.rules.TestRule; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({MasterTests.class, MediumTests.class}) public class TestTruncateTableProcedure extends TestTableDDLProcedureBase { - private static final Log LOG = LogFactory.getLog(TestTruncateTableProcedure.class); + private static final Logger LOG = LoggerFactory.getLogger(TestTruncateTableProcedure.class); @Rule public final TestRule timeout = 
CategoryBasedTimeout.builder().withTimeout(this.getClass()). withLookingForStuckThread(true).build(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestWALProcedureStoreOnHDFS.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestWALProcedureStoreOnHDFS.java index 157d08bd40e..2834b8f4048 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestWALProcedureStoreOnHDFS.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestWALProcedureStoreOnHDFS.java @@ -20,11 +20,10 @@ package org.apache.hadoop.hbase.master.procedure; import java.util.concurrent.atomic.AtomicInteger; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HBaseTestingUtility; +import org.apache.hadoop.hbase.log.HBaseMarkers; import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility; import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility.TestProcedure; import org.apache.hadoop.hbase.procedure2.store.ProcedureStore; @@ -39,6 +38,8 @@ import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; @@ -46,7 +47,7 @@ import static org.junit.Assert.assertTrue; @Category({MasterTests.class, LargeTests.class}) public class TestWALProcedureStoreOnHDFS { - private static final Log LOG = LogFactory.getLog(TestWALProcedureStoreOnHDFS.class); + private static final Logger LOG = LoggerFactory.getLogger(TestWALProcedureStoreOnHDFS.class); protected static final HBaseTestingUtility UTIL = new HBaseTestingUtility(); @@ -58,7 +59,7 @@ public class TestWALProcedureStoreOnHDFS { @Override public void abortProcess() { - LOG.fatal("Abort the Procedure Store"); + LOG.error(HBaseMarkers.FATAL, "Abort the Procedure Store"); store.stop(true); } }; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotFileCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotFileCache.java index 15e3c9a61ca..fdc5b446571 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotFileCache.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotFileCache.java @@ -32,8 +32,6 @@ import java.util.List; import java.util.UUID; import java.util.concurrent.atomic.AtomicInteger; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -50,7 +48,8 @@ import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterables; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos; @@ -61,7 +60,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos; @Category({MasterTests.class, MediumTests.class}) public class TestSnapshotFileCache { - private static final Log LOG = LogFactory.getLog(TestSnapshotFileCache.class); + 
private static final Logger LOG = LoggerFactory.getLogger(TestSnapshotFileCache.class); private static final HBaseTestingUtility UTIL = new HBaseTestingUtility(); private static long sequenceId = 0; private static FileSystem fs; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotHFileCleaner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotHFileCleaner.java index fba250da9ba..ad6c58e4eb9 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotHFileCleaner.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotHFileCleaner.java @@ -23,8 +23,6 @@ import java.io.IOException; import java.util.Collection; import java.util.HashSet; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -46,6 +44,8 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Test that the snapshot hfile cleaner finds hfiles referenced in a snapshot @@ -53,7 +53,7 @@ import org.junit.rules.TestName; @Category({MasterTests.class, SmallTests.class}) public class TestSnapshotHFileCleaner { - private static final Log LOG = LogFactory.getLog(TestSnapshotFileCache.class); + private static final Logger LOG = LoggerFactory.getLogger(TestSnapshotFileCache.class); private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private static final String TABLE_NAME_STR = "testSnapshotManifest"; private static final String SNAPSHOT_NAME_STR = "testSnapshotManifest-snapshot"; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/TestCachedMobFile.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/TestCachedMobFile.java index 61f1cced625..32e65220d82 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/TestCachedMobFile.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/TestCachedMobFile.java @@ -20,8 +20,6 @@ package org.apache.hadoop.hbase.mob; import junit.framework.TestCase; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -38,10 +36,12 @@ import org.apache.hadoop.hbase.util.Bytes; import org.junit.Assert; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category(SmallTests.class) public class TestCachedMobFile extends TestCase{ - static final Log LOG = LogFactory.getLog(TestCachedMobFile.class); + static final Logger LOG = LoggerFactory.getLogger(TestCachedMobFile.class); private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private Configuration conf = TEST_UTIL.getConfiguration(); private CacheConfig cacheConf = new CacheConfig(conf); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/TestMobFile.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/TestMobFile.java index 9b69411a31f..f894fb25112 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/TestMobFile.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/TestMobFile.java @@ -20,8 +20,6 @@ package org.apache.hadoop.hbase.mob; import 
junit.framework.TestCase; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -40,10 +38,12 @@ import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category(SmallTests.class) public class TestMobFile extends TestCase { - static final Log LOG = LogFactory.getLog(TestMobFile.class); + static final Logger LOG = LoggerFactory.getLogger(TestMobFile.class); private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private Configuration conf = TEST_UTIL.getConfiguration(); private CacheConfig cacheConf = new CacheConfig(conf); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/TestMobFileCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/TestMobFileCache.java index d4289862cfa..42e652867d3 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/TestMobFileCache.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/TestMobFileCache.java @@ -23,8 +23,6 @@ import java.util.Date; import junit.framework.TestCase; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -42,10 +40,12 @@ import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category(SmallTests.class) public class TestMobFileCache extends TestCase { - static final Log LOG = LogFactory.getLog(TestMobFileCache.class); + static final Logger LOG = LoggerFactory.getLogger(TestMobFileCache.class); private HBaseTestingUtility UTIL; private HRegion region; private Configuration conf; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/compactions/TestMobCompactor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/compactions/TestMobCompactor.java index 54071d081d5..8c974e676a5 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/compactions/TestMobCompactor.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/compactions/TestMobCompactor.java @@ -29,6 +29,7 @@ import java.util.Arrays; import java.util.Calendar; import java.util.Collections; import java.util.List; +import java.util.Objects; import java.util.Optional; import java.util.Random; import java.util.concurrent.ExecutorService; @@ -40,8 +41,6 @@ import java.util.concurrent.TimeUnit; import javax.crypto.spec.SecretKeySpec; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -103,10 +102,12 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category(LargeTests.class) public class TestMobCompactor { - private static final Log LOG = LogFactory.getLog(TestMobCompactor.class); + private static final Logger LOG = LoggerFactory.getLogger(TestMobCompactor.class); private final static 
HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private static Configuration conf = null; private TableName tableName; @@ -715,7 +716,7 @@ public class TestMobCompactor { Thread.sleep(50); fileList = fs.listStatus(path); for (FileStatus fileStatus: fileList) { - LOG.info(fileStatus); + LOG.info(Objects.toString(fileStatus)); } } } @@ -1040,7 +1041,7 @@ public class TestMobCompactor { } } }); - ((ThreadPoolExecutor) pool).allowCoreThreadTimeOut(true); + pool.allowCoreThreadTimeOut(true); return pool; } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/compactions/TestPartitionedMobCompactor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/compactions/TestPartitionedMobCompactor.java index bf899ea177a..34acbe493eb 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/compactions/TestPartitionedMobCompactor.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/compactions/TestPartitionedMobCompactor.java @@ -40,8 +40,6 @@ import java.util.concurrent.SynchronousQueue; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -86,10 +84,12 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category(LargeTests.class) public class TestPartitionedMobCompactor { - private static final Log LOG = LogFactory.getLog(TestPartitionedMobCompactor.class); + private static final Logger LOG = LoggerFactory.getLogger(TestPartitionedMobCompactor.class); private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private final static String family = "family"; private final static String qf = "qf"; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/namespace/TestNamespaceAuditor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/namespace/TestNamespaceAuditor.java index 2140cc1dc7c..94680f2c80d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/namespace/TestNamespaceAuditor.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/namespace/TestNamespaceAuditor.java @@ -28,8 +28,6 @@ import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; import org.apache.commons.lang3.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -85,6 +83,8 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestRule; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; @@ -97,7 +97,7 @@ import static org.junit.Assert.fail; public class TestNamespaceAuditor { @Rule public final TestRule timeout = CategoryBasedTimeout.builder(). 
withTimeout(this.getClass()).withLookingForStuckThread(true).build(); - private static final Log LOG = LogFactory.getLog(TestNamespaceAuditor.class); + private static final Logger LOG = LoggerFactory.getLogger(TestNamespaceAuditor.class); private static final HBaseTestingUtility UTIL = new HBaseTestingUtility(); private static Admin ADMIN; private String prefix = "TestNamespaceAuditor"; @@ -198,7 +198,7 @@ public class TestNamespaceAuditor { try { ADMIN.createNamespace(nspDesc); } catch (Exception exp) { - LOG.warn(exp); + LOG.warn(exp.toString(), exp); exceptionCaught = true; } finally { assertTrue(exceptionCaught); @@ -211,7 +211,7 @@ public class TestNamespaceAuditor { try { ADMIN.createNamespace(nspDesc); } catch (Exception exp) { - LOG.warn(exp); + LOG.warn(exp.toString(), exp); exceptionCaught = true; } finally { assertTrue(exceptionCaught); @@ -224,7 +224,7 @@ public class TestNamespaceAuditor { try { ADMIN.createNamespace(nspDesc); } catch (Exception exp) { - LOG.warn(exp); + LOG.warn(exp.toString(), exp); exceptionCaught = true; } finally { assertTrue(exceptionCaught); @@ -237,7 +237,7 @@ public class TestNamespaceAuditor { try { ADMIN.createNamespace(nspDesc); } catch (Exception exp) { - LOG.warn(exp); + LOG.warn(exp.toString(), exp); exceptionCaught = true; } finally { assertTrue(exceptionCaught); @@ -416,7 +416,7 @@ public class TestNamespaceAuditor { ADMIN.createTable(tableDescOne); fail("Table " + tableOne.toString() + "creation should fail."); } catch (Exception exp) { - LOG.error(exp); + LOG.error(exp.toString(), exp); } assertFalse(ADMIN.tableExists(tableOne)); @@ -429,7 +429,7 @@ public class TestNamespaceAuditor { ADMIN.createTable(tableDescOne); } catch (Exception e) { fail("Table " + tableOne.toString() + "creation should succeed."); - LOG.error(e); + LOG.error(e.toString(), e); } assertTrue(ADMIN.tableExists(tableOne)); nstate = getNamespaceState(nsp1); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/SimpleMasterProcedureManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/SimpleMasterProcedureManager.java index 8eb2e588278..579fcd3ab0d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/SimpleMasterProcedureManager.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/SimpleMasterProcedureManager.java @@ -23,8 +23,6 @@ import java.util.HashMap; import java.util.List; import java.util.concurrent.ThreadPoolExecutor; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.errorhandling.ForeignException; import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher; @@ -32,13 +30,15 @@ import org.apache.hadoop.hbase.master.MasterServices; import org.apache.hadoop.hbase.master.MetricsMaster; import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription; import org.apache.zookeeper.KeeperException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class SimpleMasterProcedureManager extends MasterProcedureManager { public static final String SIMPLE_SIGNATURE = "simple_test"; public static final String SIMPLE_DATA = "simple_test_data"; - private static final Log LOG = LogFactory.getLog(SimpleMasterProcedureManager.class); + private static final Logger LOG = LoggerFactory.getLogger(SimpleMasterProcedureManager.class); private MasterServices master; private ProcedureCoordinator coordinator; diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/SimpleRSProcedureManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/SimpleRSProcedureManager.java index 7d6f80a1dfb..f5a858ab93a 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/SimpleRSProcedureManager.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/SimpleRSProcedureManager.java @@ -29,8 +29,6 @@ import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Abortable; import org.apache.hadoop.hbase.DaemonThreadFactory; @@ -39,10 +37,12 @@ import org.apache.hadoop.hbase.zookeeper.ZKWatcher; import org.apache.hadoop.hbase.errorhandling.ForeignException; import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher; import org.apache.zookeeper.KeeperException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class SimpleRSProcedureManager extends RegionServerProcedureManager { - private static final Log LOG = LogFactory.getLog(SimpleRSProcedureManager.class); + private static final Logger LOG = LoggerFactory.getLogger(SimpleRSProcedureManager.class); private RegionServerServices rss; private ProcedureMemberRpcs memberRpcs; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureManager.java index 2f0b5b90eae..52a665d2428 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureManager.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureManager.java @@ -22,8 +22,6 @@ import static org.junit.Assert.assertArrayEquals; import java.io.IOException; import java.util.HashMap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.testclassification.MasterTests; @@ -33,11 +31,13 @@ import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({MasterTests.class, SmallTests.class}) public class TestProcedureManager { - private static final Log LOG = LogFactory.getLog(TestProcedureManager.class); + private static final Logger LOG = LoggerFactory.getLogger(TestProcedureManager.class); private static final int NUM_RS = 2; private static HBaseTestingUtility util = new HBaseTestingUtility(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedure.java index 36ea086dfef..177abbc6ce0 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedure.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedure.java @@ -34,8 +34,6 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.atomic.AtomicInteger; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Abortable; import org.apache.hadoop.hbase.HBaseTestingUtility; import 
org.apache.hadoop.hbase.testclassification.MasterTests; @@ -55,7 +53,8 @@ import org.mockito.internal.matchers.ArrayEquals; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; import org.mockito.verification.VerificationMode; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; /** @@ -64,7 +63,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; @Category({MasterTests.class, MediumTests.class}) public class TestZKProcedure { - private static final Log LOG = LogFactory.getLog(TestZKProcedure.class); + private static final Logger LOG = LoggerFactory.getLogger(TestZKProcedure.class); private static HBaseTestingUtility UTIL = new HBaseTestingUtility(); private static final String COORDINATOR_NODE_NAME = "coordinator"; private static final long KEEP_ALIVE = 100; // seconds diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedureControllers.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedureControllers.java index 5d680743c5b..723ecd7ae2f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedureControllers.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedureControllers.java @@ -28,8 +28,6 @@ import java.util.ArrayList; import java.util.List; import java.util.concurrent.CountDownLatch; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; @@ -46,7 +44,8 @@ import org.mockito.Mockito; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; import org.mockito.verification.VerificationMode; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; /** @@ -55,7 +54,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; @Category({MasterTests.class, MediumTests.class}) public class TestZKProcedureControllers { - private static final Log LOG = LogFactory.getLog(TestZKProcedureControllers.class); + private static final Logger LOG = LoggerFactory.getLogger(TestZKProcedureControllers.class); private final static HBaseTestingUtility UTIL = new HBaseTestingUtility(); private static final String COHORT_NODE_NAME = "expected"; private static final String CONTROLLER_NODE_NAME = "controller"; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/SpaceQuotaHelperForTests.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/SpaceQuotaHelperForTests.java index 86df39fe529..be21f4969b3 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/SpaceQuotaHelperForTests.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/SpaceQuotaHelperForTests.java @@ -28,8 +28,6 @@ import java.util.Random; import java.util.Set; import java.util.concurrent.atomic.AtomicLong; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HColumnDescriptor; @@ -48,14 +46,15 @@ import org.apache.hadoop.hbase.regionserver.HStoreFile; import org.apache.hadoop.hbase.util.Bytes; import org.apache.yetus.audience.InterfaceAudience; import 
org.junit.rules.TestName; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.collect.HashMultimap; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterables; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Multimap; @InterfaceAudience.Private public class SpaceQuotaHelperForTests { - private static final Log LOG = LogFactory.getLog(SpaceQuotaHelperForTests.class); + private static final Logger LOG = LoggerFactory.getLogger(SpaceQuotaHelperForTests.class); public static final int SIZE_PER_VALUE = 256; public static final String F1 = "f1"; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/SpaceQuotaSnapshotNotifierForTest.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/SpaceQuotaSnapshotNotifierForTest.java index 3198aa3e1e1..46fb1e89086 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/SpaceQuotaSnapshotNotifierForTest.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/SpaceQuotaSnapshotNotifierForTest.java @@ -19,10 +19,10 @@ package org.apache.hadoop.hbase.quotas; import java.util.HashMap; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.TableName; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.client.Connection; /** @@ -30,7 +30,8 @@ import org.apache.hadoop.hbase.client.Connection; */ @InterfaceAudience.Private public class SpaceQuotaSnapshotNotifierForTest implements SpaceQuotaSnapshotNotifier { - private static final Log LOG = LogFactory.getLog(SpaceQuotaSnapshotNotifierForTest.class); + private static final Logger LOG = + LoggerFactory.getLogger(SpaceQuotaSnapshotNotifierForTest.class); private final Map tableQuotaSnapshots = new HashMap<>(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaAdmin.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaAdmin.java index 2a16739af55..788fadaa363 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaAdmin.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaAdmin.java @@ -18,10 +18,9 @@ package org.apache.hadoop.hbase.quotas; +import java.util.Objects; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellScanner; import org.apache.hadoop.hbase.HBaseTestingUtility; @@ -44,7 +43,8 @@ import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterables; import static org.junit.Assert.assertEquals; @@ -57,7 +57,7 @@ import static org.junit.Assert.fail; */ @Category({ClientTests.class, MediumTests.class}) public class TestQuotaAdmin { - private static final Log LOG = LogFactory.getLog(TestQuotaAdmin.class); + private static final Logger LOG = LoggerFactory.getLogger(TestQuotaAdmin.class); private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); @@ -148,7 +148,7 @@ public class TestQuotaAdmin { int countThrottle = 0; int countGlobalBypass = 0; for (QuotaSettings settings: scanner) { - LOG.debug(settings); + 
LOG.debug(Objects.toString(settings)); switch (settings.getQuotaType()) { case THROTTLE: ThrottleSettings throttle = (ThrottleSettings)settings; @@ -419,7 +419,7 @@ public class TestQuotaAdmin { try { int count = 0; for (QuotaSettings settings: scanner) { - LOG.debug(settings); + LOG.debug(Objects.toString(settings)); count++; } return count; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaObserverChoreRegionReports.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaObserverChoreRegionReports.java index c57a89ff170..8451acf5adb 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaObserverChoreRegionReports.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaObserverChoreRegionReports.java @@ -26,8 +26,6 @@ import java.util.List; import java.util.Map; import java.util.Map.Entry; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HColumnDescriptor; @@ -50,13 +48,16 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A test case to verify that region reports are expired when they are not sent. */ @Category(LargeTests.class) public class TestQuotaObserverChoreRegionReports { - private static final Log LOG = LogFactory.getLog(TestQuotaObserverChoreRegionReports.class); + private static final Logger LOG = + LoggerFactory.getLogger(TestQuotaObserverChoreRegionReports.class); private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); @Rule diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaObserverChoreWithMiniCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaObserverChoreWithMiniCluster.java index 736be8d0f46..06a06fd4edb 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaObserverChoreWithMiniCluster.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaObserverChoreWithMiniCluster.java @@ -32,8 +32,6 @@ import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.NamespaceDescriptor; @@ -52,7 +50,8 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterables; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Multimap; @@ -61,7 +60,8 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Multimap; */ @Category(LargeTests.class) public class TestQuotaObserverChoreWithMiniCluster { - private static final Log LOG = LogFactory.getLog(TestQuotaObserverChoreWithMiniCluster.class); + private static final Logger LOG = + LoggerFactory.getLogger(TestQuotaObserverChoreWithMiniCluster.class); private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private static final AtomicLong COUNTER = new AtomicLong(0); private static final long DEFAULT_WAIT_MILLIS = 
500; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaStatusRPCs.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaStatusRPCs.java index a5fe4069a27..a44ad74f506 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaStatusRPCs.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaStatusRPCs.java @@ -26,8 +26,6 @@ import java.util.Map.Entry; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicReference; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.TableName; @@ -48,13 +46,15 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Test class for the quota status RPCs in the master and regionserver. */ @Category({MediumTests.class}) public class TestQuotaStatusRPCs { - private static final Log LOG = LogFactory.getLog(TestQuotaStatusRPCs.class); + private static final Logger LOG = LoggerFactory.getLogger(TestQuotaStatusRPCs.class); private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private static final AtomicLong COUNTER = new AtomicLong(0); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaThrottle.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaThrottle.java index b1b2797ce49..dfb3484df06 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaThrottle.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaThrottle.java @@ -18,17 +18,15 @@ package org.apache.hadoop.hbase.quotas; +import java.util.Objects; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.Put; -import org.apache.hadoop.hbase.client.RetriesExhaustedWithDetailsException; import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.testclassification.MediumTests; @@ -44,12 +42,14 @@ import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.junit.Assert.assertEquals; @Category({RegionServerTests.class, MediumTests.class}) public class TestQuotaThrottle { - private final static Log LOG = LogFactory.getLog(TestQuotaThrottle.class); + private final static Logger LOG = LoggerFactory.getLogger(TestQuotaThrottle.class); private final static int REFRESH_TIME = 30 * 60000; @@ -587,9 +587,9 @@ public class TestQuotaThrottle { } LOG.debug("QuotaCache"); - LOG.debug(quotaCache.getNamespaceQuotaCache()); - LOG.debug(quotaCache.getTableQuotaCache()); - LOG.debug(quotaCache.getUserQuotaCache()); + LOG.debug(Objects.toString(quotaCache.getNamespaceQuotaCache())); + LOG.debug(Objects.toString(quotaCache.getTableQuotaCache())); + LOG.debug(Objects.toString(quotaCache.getUserQuotaCache())); } } diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestRegionSizeUse.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestRegionSizeUse.java index 035216c6e6d..021ebf23367 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestRegionSizeUse.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestRegionSizeUse.java @@ -25,8 +25,6 @@ import java.util.Map; import java.util.Map.Entry; import java.util.Random; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HColumnDescriptor; @@ -47,13 +45,15 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Test class which verifies that region sizes are reported to the master. */ @Category(MediumTests.class) public class TestRegionSizeUse { - private static final Log LOG = LogFactory.getLog(TestRegionSizeUse.class); + private static final Logger LOG = LoggerFactory.getLogger(TestRegionSizeUse.class); private static final int SIZE_PER_VALUE = 256; private static final int NUM_SPLITS = 10; private static final String F1 = "f1"; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestSnapshotQuotaObserverChore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestSnapshotQuotaObserverChore.java index 4022e3ffeba..368ab127926 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestSnapshotQuotaObserverChore.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestSnapshotQuotaObserverChore.java @@ -26,8 +26,6 @@ import java.util.Map; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicReference; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellScanner; @@ -57,17 +55,18 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; - -import com.google.common.collect.HashMultimap; -import com.google.common.collect.Iterables; -import com.google.common.collect.Multimap; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.HashMultimap; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterables; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Multimap; /** * Test class for the {@link SnapshotQuotaObserverChore}. 
*/ @Category(MediumTests.class) public class TestSnapshotQuotaObserverChore { - private static final Log LOG = LogFactory.getLog(TestSnapshotQuotaObserverChore.class); + private static final Logger LOG = LoggerFactory.getLogger(TestSnapshotQuotaObserverChore.class); private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private static final AtomicLong COUNTER = new AtomicLong(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestSpaceQuotas.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestSpaceQuotas.java index e923cc611bb..98186c57506 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestSpaceQuotas.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestSpaceQuotas.java @@ -29,8 +29,6 @@ import java.util.Map; import java.util.Map.Entry; import java.util.concurrent.atomic.AtomicLong; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -72,13 +70,15 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * End-to-end test class for filesystem space quotas. */ @Category(LargeTests.class) public class TestSpaceQuotas { - private static final Log LOG = LogFactory.getLog(TestSpaceQuotas.class); + private static final Logger LOG = LoggerFactory.getLogger(TestSpaceQuotas.class); private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); // Global for all tests in the class private static final AtomicLong COUNTER = new AtomicLong(0); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestSpaceQuotasWithSnapshots.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestSpaceQuotasWithSnapshots.java index 85c7de237c6..6ae28e6020e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestSpaceQuotasWithSnapshots.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestSpaceQuotasWithSnapshots.java @@ -25,8 +25,6 @@ import java.util.Map; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellScanner; @@ -51,15 +49,16 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; - -import com.google.common.collect.Iterables; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterables; /** * Test class to exercise the inclusion of snapshots in space quotas */ @Category({LargeTests.class}) public class TestSpaceQuotasWithSnapshots { - private static final Log LOG = LogFactory.getLog(TestSpaceQuotasWithSnapshots.class); + private static final Logger LOG = LoggerFactory.getLogger(TestSpaceQuotasWithSnapshots.class); private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); // Global for all tests in the class private static final AtomicLong COUNTER = new AtomicLong(0); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestSuperUserQuotaPermissions.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestSuperUserQuotaPermissions.java index 3a60cbbd196..300268f0877 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestSuperUserQuotaPermissions.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestSuperUserQuotaPermissions.java @@ -26,8 +26,6 @@ import java.util.Map; import java.util.concurrent.Callable; import java.util.concurrent.atomic.AtomicLong; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.HBaseTestingUtility; @@ -51,13 +49,15 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Test class to verify that the HBase superuser can override quotas. */ @Category(MediumTests.class) public class TestSuperUserQuotaPermissions { - private static final Log LOG = LogFactory.getLog(TestSuperUserQuotaPermissions.class); + private static final Logger LOG = LoggerFactory.getLogger(TestSuperUserQuotaPermissions.class); private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); // Default to the user running the tests private static final String SUPERUSER_NAME = System.getProperty("user.name"); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/CreateRandomStoreFile.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/CreateRandomStoreFile.java index a311501d9a0..4942c228e5e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/CreateRandomStoreFile.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/CreateRandomStoreFile.java @@ -28,8 +28,6 @@ import org.apache.commons.cli.HelpFormatter; import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; import org.apache.commons.cli.PosixParser; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -43,6 +41,8 @@ import org.apache.hadoop.hbase.io.hfile.HFileContext; import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder; import org.apache.hadoop.hbase.util.BloomFilterFactory; import org.apache.hadoop.io.BytesWritable; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Creates an HFile with random key/value pairs. 
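Across these test classes the change is the same mechanical substitution: the commons-logging Log/LogFactory pair gives way to the SLF4J Logger/LoggerFactory pair, with the rest of the field declaration untouched. A minimal sketch of that recurring pattern, using a hypothetical class name:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class SomeHypotheticalTest {
  // Before (commons-logging):
  //   private static final Log LOG = LogFactory.getLog(SomeHypotheticalTest.class);
  // After (SLF4J):
  private static final Logger LOG = LoggerFactory.getLogger(SomeHypotheticalTest.class);
}
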
@@ -55,8 +55,8 @@ public class CreateRandomStoreFile { */ private static final int LEN_VARIATION = 5; - private static final Log LOG = - LogFactory.getLog(CreateRandomStoreFile.class); + private static final Logger LOG = + LoggerFactory.getLogger(CreateRandomStoreFile.class); private static final String OUTPUT_DIR_OPTION = "o"; private static final String NUM_KV_OPTION = "n"; private static final String HFILE_VERSION_OPTION = "h"; @@ -122,7 +122,7 @@ public class CreateRandomStoreFile { try { cmdLine = parser.parse(options, args); } catch (ParseException ex) { - LOG.error(ex); + LOG.error(ex.toString(), ex); return false; } @@ -172,12 +172,12 @@ public class CreateRandomStoreFile { int blockSize = HConstants.DEFAULT_BLOCKSIZE; if (cmdLine.hasOption(BLOCK_SIZE_OPTION)) blockSize = Integer.valueOf(cmdLine.getOptionValue(BLOCK_SIZE_OPTION)); - + if (cmdLine.hasOption(BLOOM_BLOCK_SIZE_OPTION)) { conf.setInt(BloomFilterFactory.IO_STOREFILE_BLOOM_BLOCK_SIZE, Integer.valueOf(cmdLine.getOptionValue(BLOOM_BLOCK_SIZE_OPTION))); } - + if (cmdLine.hasOption(INDEX_BLOCK_SIZE_OPTION)) { conf.setInt(HFileBlockIndex.MAX_CHUNK_SIZE_KEY, Integer.valueOf(cmdLine.getOptionValue(INDEX_BLOCK_SIZE_OPTION))); @@ -299,7 +299,7 @@ public class CreateRandomStoreFile { if (!app.run(args)) System.exit(EXIT_FAILURE); } catch (IOException ex) { - LOG.error(ex); + LOG.error(ex.toString(), ex); System.exit(EXIT_FAILURE); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/DataBlockEncodingTool.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/DataBlockEncodingTool.java index 5f858264ad8..ab1501ee357 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/DataBlockEncodingTool.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/DataBlockEncodingTool.java @@ -32,8 +32,6 @@ import org.apache.commons.cli.Option; import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; import org.apache.commons.cli.PosixParser; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -55,13 +53,15 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.io.compress.CompressionOutputStream; import org.apache.hadoop.io.compress.Compressor; import org.apache.hadoop.io.compress.Decompressor; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Tests various algorithms for key compression on an existing HFile. Useful * for testing, debugging and benchmarking. 
*/ public class DataBlockEncodingTool { - private static final Log LOG = LogFactory.getLog( + private static final Logger LOG = LoggerFactory.getLogger( DataBlockEncodingTool.class); private static final boolean includesMemstoreTS = true; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java index af82692aa65..774888c642a 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java @@ -27,13 +27,12 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.List; +import java.util.Objects; import java.util.Random; import java.util.concurrent.CountDownLatch; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -75,6 +74,8 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Testing of HRegion.incrementColumnValue, HRegion.increment, @@ -82,7 +83,7 @@ import org.junit.rules.TestName; */ @Category({VerySlowRegionServerTests.class, MediumTests.class}) // Starts 100 threads public class TestAtomicOperation { - private static final Log LOG = LogFactory.getLog(TestAtomicOperation.class); + private static final Logger LOG = LoggerFactory.getLogger(TestAtomicOperation.class); @Rule public TestName name = new TestName(); HRegion region = null; @@ -107,8 +108,8 @@ public class TestAtomicOperation { public void teardown() throws IOException { if (region != null) { BlockCache bc = region.getStores().get(0).getCacheConfig().getBlockCache(); - ((HRegion)region).close(); - WAL wal = ((HRegion)region).getWAL(); + region.close(); + WAL wal = region.getWAL(); if (wal != null) wal.close(); if (bc != null) bc.shutdown(); region = null; @@ -428,7 +429,7 @@ public class TestAtomicOperation { Get g = new Get(row); Result r = region.get(g); if (r.size() != 1) { - LOG.debug(r); + LOG.debug(Objects.toString(r)); failures.incrementAndGet(); fail(); } @@ -525,7 +526,7 @@ public class TestAtomicOperation { ; rs.close(); if (r.size() != 1) { - LOG.debug(r); + LOG.debug(Objects.toString(r)); failures.incrementAndGet(); fail(); } @@ -627,6 +628,7 @@ public class TestAtomicOperation { this.region = region; } + @Override public void doWork() throws Exception { Put[] puts = new Put[1]; Put put = new Put(Bytes.toBytes("r1")); @@ -644,6 +646,7 @@ public class TestAtomicOperation { this.region = region; } + @Override public void doWork() throws Exception { Put[] puts = new Put[1]; Put put = new Put(Bytes.toBytes("r1")); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksRead.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksRead.java index c40b24a47ac..6331e869196 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksRead.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksRead.java @@ -24,8 +24,6 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.List; -import 
org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; @@ -50,13 +48,15 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManagerTestHelper; import org.junit.*; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static junit.framework.TestCase.assertTrue; import static org.junit.Assert.assertEquals; @Category({RegionServerTests.class, MediumTests.class}) public class TestBlocksRead { - private static final Log LOG = LogFactory.getLog(TestBlocksRead.class); + private static final Logger LOG = LoggerFactory.getLogger(TestBlocksRead.class); @Rule public TestName testName = new TestName(); static final BloomType[] BLOOM_TYPE = new BloomType[] { BloomType.ROWCOL, diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.java index feb456efc9e..02a21b6aac4 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.java @@ -27,8 +27,6 @@ import java.util.Collection; import java.util.List; import java.util.Random; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -63,6 +61,8 @@ import org.junit.rules.TestName; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import org.junit.runners.Parameterized.Parameters; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Tests {@link HFile} cache-on-write functionality for data blocks, non-root @@ -72,7 +72,7 @@ import org.junit.runners.Parameterized.Parameters; @Category({RegionServerTests.class, MediumTests.class}) public class TestCacheOnWriteInSchema { - private static final Log LOG = LogFactory.getLog(TestCacheOnWriteInSchema.class); + private static final Logger LOG = LoggerFactory.getLogger(TestCacheOnWriteInSchema.class); @Rule public TestName name = new TestName(); private static final HBaseTestingUtility TEST_UTIL = HBaseTestingUtility.createLocalHTU(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestColumnSeeking.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestColumnSeeking.java index 4a26f76cb0a..f6cd4e29d4f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestColumnSeeking.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestColumnSeeking.java @@ -30,8 +30,6 @@ import java.util.HashSet; import java.util.List; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.CategoryBasedTimeout; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.HBaseTestingUtility; @@ -52,6 +50,8 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; import org.junit.rules.TestRule; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({RegionServerTests.class, MediumTests.class}) public class TestColumnSeeking { @@ -60,7 +60,7 @@ public class TestColumnSeeking { 
withLookingForStuckThread(true).build(); private final static HBaseTestingUtility TEST_UTIL = HBaseTestingUtility.createLocalHTU(); - private static final Log LOG = LogFactory.getLog(TestColumnSeeking.class); + private static final Logger LOG = LoggerFactory.getLogger(TestColumnSeeking.class); @SuppressWarnings("unchecked") @Test diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactSplitThread.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactSplitThread.java index 6d064946706..4df21e0bf17 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactSplitThread.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactSplitThread.java @@ -18,8 +18,7 @@ package org.apache.hadoop.hbase.regionserver; import java.util.Collection; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; + import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -36,12 +35,14 @@ import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.junit.Assert.assertEquals; @Category(MediumTests.class) public class TestCompactSplitThread { - private static final Log LOG = LogFactory.getLog(TestCompactSplitThread.class); + private static final Logger LOG = LoggerFactory.getLogger(TestCompactSplitThread.class); private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private final TableName tableName = TableName.valueOf(getClass().getSimpleName()); private final byte[] family = Bytes.toBytes("f"); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactingMemStore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactingMemStore.java index 0f18deeab4e..6fbf99adf61 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactingMemStore.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactingMemStore.java @@ -23,8 +23,6 @@ import java.lang.management.ManagementFactory; import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; @@ -54,6 +52,8 @@ import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; @@ -64,7 +64,7 @@ import static org.junit.Assert.assertTrue; @Category({RegionServerTests.class, MediumTests.class}) public class TestCompactingMemStore extends TestDefaultMemStore { - private static final Log LOG = LogFactory.getLog(TestCompactingMemStore.class); + private static final Logger LOG = LoggerFactory.getLogger(TestCompactingMemStore.class); protected static ChunkCreator chunkCreator; protected HRegion region; protected RegionServicesForStores regionServicesForStores; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactingToCellFlatMapMemStore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactingToCellFlatMapMemStore.java index 37fa85fcbb0..b77359d024e 100644 --- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactingToCellFlatMapMemStore.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactingToCellFlatMapMemStore.java @@ -18,8 +18,6 @@ */ package org.apache.hadoop.hbase.regionserver; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.HBaseConfiguration; @@ -38,6 +36,8 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.io.IOException; import java.util.List; @@ -52,7 +52,8 @@ public class TestCompactingToCellFlatMapMemStore extends TestCompactingMemStore public static Object[] data() { return new Object[] { "CHUNK_MAP", "ARRAY_MAP" }; // test different immutable indexes } - private static final Log LOG = LogFactory.getLog(TestCompactingToCellFlatMapMemStore.class); + private static final Logger LOG = + LoggerFactory.getLogger(TestCompactingToCellFlatMapMemStore.class); public final boolean toCellChunkMap; Configuration conf; ////////////////////////////////////////////////////////////////////////////// @@ -87,6 +88,7 @@ public class TestCompactingToCellFlatMapMemStore extends TestCompactingMemStore ////////////////////////////////////////////////////////////////////////////// // Compaction tests ////////////////////////////////////////////////////////////////////////////// + @Override public void testCompaction1Bucket() throws IOException { int counter = 0; String[] keys1 = { "A", "A", "B", "C" }; //A1, A2, B3, C4 @@ -134,6 +136,7 @@ public class TestCompactingToCellFlatMapMemStore extends TestCompactingMemStore memstore.clearSnapshot(snapshot.getId()); } + @Override public void testCompaction2Buckets() throws IOException { if (toCellChunkMap) { // set memstore to flat into CellChunkMap @@ -195,6 +198,7 @@ public class TestCompactingToCellFlatMapMemStore extends TestCompactingMemStore memstore.clearSnapshot(snapshot.getId()); } + @Override public void testCompaction3Buckets() throws IOException { if (toCellChunkMap) { // set memstore to flat into CellChunkMap @@ -572,6 +576,7 @@ public class TestCompactingToCellFlatMapMemStore extends TestCompactingMemStore assertTrue(chunkCreator.getPoolSize() > 0); } + @Override @Test public void testPuttingBackChunksAfterFlushing() throws IOException { byte[] row = Bytes.toBytes("testrow"); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionFileNotFound.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionFileNotFound.java index 5735e882729..112fe4de30d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionFileNotFound.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionFileNotFound.java @@ -24,8 +24,6 @@ import java.io.FileNotFoundException; import java.io.IOException; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.TableName; @@ -43,6 +41,8 @@ import org.junit.Assert; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; 
+import org.slf4j.LoggerFactory; /** * This class tests the scenario where a store refresh happens due to a file not found during scan, @@ -51,7 +51,7 @@ import org.junit.experimental.categories.Category; */ @Category(MediumTests.class) public class TestCompactionFileNotFound { - private static final Log LOG = LogFactory.getLog(TestCompactionFileNotFound.class); + private static final Logger LOG = LoggerFactory.getLogger(TestCompactionFileNotFound.class); private static final HBaseTestingUtility util = new HBaseTestingUtility(); private static final TableName TEST_TABLE = TableName.valueOf("test"); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionInDeadRegionServer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionInDeadRegionServer.java index 1b39a6d3630..457f0029872 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionInDeadRegionServer.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionInDeadRegionServer.java @@ -24,8 +24,6 @@ import java.io.IOException; import java.util.Arrays; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; @@ -53,6 +51,8 @@ import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import org.junit.runners.Parameterized.Parameter; import org.junit.runners.Parameterized.Parameters; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This testcase is used to ensure that the compaction marker will fail a compaction if the RS is @@ -61,7 +61,7 @@ import org.junit.runners.Parameterized.Parameters; @RunWith(Parameterized.class) @Category({ RegionServerTests.class, LargeTests.class }) public class TestCompactionInDeadRegionServer { - private static final Log LOG = LogFactory.getLog(TestCompactionInDeadRegionServer.class); + private static final Logger LOG = LoggerFactory.getLogger(TestCompactionInDeadRegionServer.class); private static final HBaseTestingUtility UTIL = new HBaseTestingUtility(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionPolicy.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionPolicy.java index 1e9fd409ce1..56931b2834e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionPolicy.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionPolicy.java @@ -23,8 +23,6 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -43,11 +41,12 @@ import org.apache.hadoop.hbase.util.FSUtils; import org.junit.After; import org.junit.Assert; import org.junit.Before; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; public class TestCompactionPolicy { - private final static Log LOG = LogFactory.getLog(TestCompactionPolicy.class); + private final static Logger LOG = LoggerFactory.getLogger(TestCompactionPolicy.class); protected final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); protected Configuration conf; diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionState.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionState.java index b30a88459ac..241d0648dec 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionState.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionState.java @@ -26,8 +26,6 @@ import java.util.ArrayList; import java.util.List; import java.util.Random; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.client.Admin; @@ -43,11 +41,13 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** Unit tests to test retrieving table/region compaction state*/ @Category({VerySlowRegionServerTests.class, LargeTests.class}) public class TestCompactionState { - private static final Log LOG = LogFactory.getLog(TestCompactionState.class); + private static final Logger LOG = LoggerFactory.getLogger(TestCompactionState.class); private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private final static Random random = new Random(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java index 58c76ca1411..12763f9fff7 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java @@ -32,8 +32,6 @@ import java.util.Collections; import java.util.List; import java.util.Random; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -60,6 +58,8 @@ import org.apache.hadoop.hbase.util.Bytes; import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Tests writing Bloom filter blocks in the same part of the file as data @@ -71,7 +71,7 @@ public class TestCompoundBloomFilter { private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); - private static final Log LOG = LogFactory.getLog( + private static final Logger LOG = LoggerFactory.getLogger( TestCompoundBloomFilter.class); private static final int NUM_TESTS = 9; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultMemStore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultMemStore.java index 0a0bf5d720d..d4f60859de0 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultMemStore.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultMemStore.java @@ -27,11 +27,10 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.List; +import java.util.Objects; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicReference; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import 
org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.CategoryBasedTimeout; @@ -67,7 +66,8 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; import org.junit.rules.TestRule; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.base.Joiner; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterables; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; @@ -75,7 +75,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; /** memstore test case */ @Category({RegionServerTests.class, MediumTests.class}) public class TestDefaultMemStore { - private static final Log LOG = LogFactory.getLog(TestDefaultMemStore.class); + private static final Logger LOG = LoggerFactory.getLogger(TestDefaultMemStore.class); @Rule public TestName name = new TestName(); @Rule public final TestRule timeout = CategoryBasedTimeout.builder().withTimeout(this.getClass()). withLookingForStuckThread(true).build(); @@ -167,7 +167,7 @@ public class TestDefaultMemStore { int count = 0; try (StoreScanner s = new StoreScanner(scan, scanInfo, null, memstorescanners)) { while (s.next(result)) { - LOG.info(result); + LOG.info(Objects.toString(result)); count++; // Row count is same as column count. assertEquals(rowCount, result.size()); @@ -184,7 +184,7 @@ public class TestDefaultMemStore { count = 0; try (StoreScanner s = new StoreScanner(scan, scanInfo, null, memstorescanners)) { while (s.next(result)) { - LOG.info(result); + LOG.info(Objects.toString(result)); // Assert the stuff is coming out in right order. assertTrue(CellUtil.matchingRows(result.get(0), Bytes.toBytes(count))); count++; @@ -208,7 +208,7 @@ public class TestDefaultMemStore { int snapshotIndex = 5; try (StoreScanner s = new StoreScanner(scan, scanInfo, null, memstorescanners)) { while (s.next(result)) { - LOG.info(result); + LOG.info(Objects.toString(result)); // Assert the stuff is coming out in right order. assertTrue(CellUtil.matchingRows(result.get(0), Bytes.toBytes(count))); // Row count is same as column count. 
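The Objects.toString(...) wrappers introduced in the quota and memstore tests above compensate for an API difference: commons-logging's debug/info accept any Object, whereas SLF4J's message parameter is a String, so non-String values such as a Result are converted explicitly (Objects.toString also tolerates null). A rough sketch, with the value object hypothetical; the parameterized call at the end is the usual SLF4J alternative rather than what these hunks do:

import java.util.Objects;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class ObjectLoggingSketch {
  private static final Logger LOG = LoggerFactory.getLogger(ObjectLoggingSketch.class);

  void logResult(Object result) {   // stand-in for e.g. a scan Result
    // commons-logging permitted LOG.debug(result); SLF4J needs a String message:
    LOG.debug(Objects.toString(result));
    // Idiomatic SLF4J alternative, deferring toString() until debug is enabled:
    LOG.debug("result={}", result);
  }
}
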
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionKeyRotation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionKeyRotation.java index b2ef2f7f8ba..ec714be7ae8 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionKeyRotation.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionKeyRotation.java @@ -29,8 +29,6 @@ import java.util.List; import javax.crypto.spec.SecretKeySpec; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HBaseTestingUtility; @@ -57,10 +55,12 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({RegionServerTests.class, MediumTests.class}) public class TestEncryptionKeyRotation { - private static final Log LOG = LogFactory.getLog(TestEncryptionKeyRotation.class); + private static final Logger LOG = LoggerFactory.getLogger(TestEncryptionKeyRotation.class); private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private static final Configuration conf = TEST_UTIL.getConfiguration(); private static final Key initialCFKey; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEndToEndSplitTransaction.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEndToEndSplitTransaction.java index fa4d7f06725..e5b8a6146a5 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEndToEndSplitTransaction.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEndToEndSplitTransaction.java @@ -30,8 +30,6 @@ import java.util.TreeSet; import java.util.stream.Collectors; import org.apache.commons.io.IOUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.ChoreService; import org.apache.hadoop.hbase.HBaseTestingUtility; @@ -63,12 +61,13 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterators; @Category(LargeTests.class) public class TestEndToEndSplitTransaction { - private static final Log LOG = LogFactory.getLog(TestEndToEndSplitTransaction.class); + private static final Logger LOG = LoggerFactory.getLogger(TestEndToEndSplitTransaction.class); private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private static final Configuration CONF = TEST_UTIL.getConfiguration(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestFSErrorsExposed.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestFSErrorsExposed.java index 48081bdbb29..bd66bde3da5 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestFSErrorsExposed.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestFSErrorsExposed.java @@ -29,8 +29,6 @@ import java.util.ArrayList; import java.util.Collections; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import 
org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FilterFileSystem; @@ -58,6 +56,8 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Test cases that ensure that file system level errors are bubbled up @@ -65,7 +65,7 @@ import org.junit.rules.TestName; */ @Category({RegionServerTests.class, MediumTests.class}) public class TestFSErrorsExposed { - private static final Log LOG = LogFactory.getLog(TestFSErrorsExposed.class); + private static final Logger LOG = LoggerFactory.getLogger(TestFSErrorsExposed.class); HBaseTestingUtility util = new HBaseTestingUtility(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestFailedAppendAndSync.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestFailedAppendAndSync.java index 015894901c3..84e424eb84f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestFailedAppendAndSync.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestFailedAppendAndSync.java @@ -27,8 +27,6 @@ import static org.mockito.Mockito.when; import java.io.IOException; import java.util.concurrent.atomic.AtomicLong; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -55,6 +53,8 @@ import org.junit.experimental.categories.Category; import org.junit.rules.TestName; import org.mockito.Mockito; import org.mockito.exceptions.verification.WantedButNotInvoked; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Testing sync/append failures. 
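Where a Throwable used to be the sole argument (for example the LOG.error(ex) calls in CreateRandomStoreFile above), the patch moves to SLF4J's two-argument overload so the stack trace is kept, reusing the exception's toString() as the message. A hedged sketch of that shape:

import java.io.IOException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class ThrowableLoggingSketch {
  private static final Logger LOG = LoggerFactory.getLogger(ThrowableLoggingSketch.class);

  void report(IOException ex) {
    // commons-logging: LOG.error(ex) logged the Throwable directly.
    // SLF4J's error(String) alone would lose the stack trace, so pass the
    // exception both as the message text and as the Throwable argument:
    LOG.error(ex.toString(), ex);
  }
}
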
@@ -62,7 +62,7 @@ import org.mockito.exceptions.verification.WantedButNotInvoked; */ @Category({MediumTests.class}) public class TestFailedAppendAndSync { - private static final Log LOG = LogFactory.getLog(TestFailedAppendAndSync.class); + private static final Logger LOG = LoggerFactory.getLogger(TestFailedAppendAndSync.class); @Rule public TestName name = new TestName(); private static final String COLUMN_FAMILY = "MyCF"; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestGetClosestAtOrBefore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestGetClosestAtOrBefore.java index 9c87ebe43f7..642b47108f4 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestGetClosestAtOrBefore.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestGetClosestAtOrBefore.java @@ -21,9 +21,8 @@ package org.apache.hadoop.hbase.regionserver; import java.io.IOException; import java.util.ArrayList; import java.util.List; +import java.util.Objects; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -49,6 +48,8 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; @@ -60,7 +61,7 @@ import static org.junit.Assert.assertTrue; @Category({RegionServerTests.class, MediumTests.class}) public class TestGetClosestAtOrBefore { @Rule public TestName testName = new TestName(); - private static final Log LOG = LogFactory.getLog(TestGetClosestAtOrBefore.class); + private static final Logger LOG = LoggerFactory.getLogger(TestGetClosestAtOrBefore.class); private static final byte[] T00 = Bytes.toBytes("000"); private static final byte[] T10 = Bytes.toBytes("010"); @@ -104,8 +105,8 @@ public class TestGetClosestAtOrBefore { InternalScanner s = mr.getScanner(new Scan()); try { List keys = new ArrayList<>(); - while (s.next(keys)) { - LOG.info(keys); + while (s.next(keys)) { + LOG.info(Objects.toString(keys)); keys.clear(); } } finally { @@ -293,8 +294,8 @@ public class TestGetClosestAtOrBefore { } finally { if (region != null) { try { - WAL wal = ((HRegion)region).getWAL(); - ((HRegion)region).close(); + WAL wal = region.getWAL(); + region.close(); wal.close(); } catch (Exception e) { e.printStackTrace(); @@ -351,8 +352,8 @@ public class TestGetClosestAtOrBefore { } finally { if (region != null) { try { - WAL wal = ((HRegion)region).getWAL(); - ((HRegion)region).close(); + WAL wal = region.getWAL(); + region.close(); wal.close(); } catch (Exception e) { e.printStackTrace(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHMobStore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHMobStore.java index 95efa806258..32e3856f0a3 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHMobStore.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHMobStore.java @@ -35,8 +35,6 @@ import java.util.concurrent.ConcurrentSkipListSet; import javax.crypto.spec.SecretKeySpec; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import 
org.apache.hadoop.fs.Path; @@ -78,10 +76,12 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; import org.mockito.Mockito; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category(MediumTests.class) public class TestHMobStore { - public static final Log LOG = LogFactory.getLog(TestHMobStore.class); + public static final Logger LOG = LoggerFactory.getLogger(TestHMobStore.class); @Rule public TestName name = new TestName(); private HMobStore store; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java index 3482955b2ae..0098091699b 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java @@ -51,6 +51,7 @@ import java.util.Collections; import java.util.List; import java.util.Map; import java.util.NavigableMap; +import java.util.Objects; import java.util.TreeMap; import java.util.UUID; import java.util.concurrent.Callable; @@ -64,8 +65,6 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; import org.apache.commons.lang3.RandomStringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileStatus; @@ -177,7 +176,8 @@ import org.mockito.ArgumentMatcher; import org.mockito.Mockito; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; @@ -199,7 +199,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescript public class TestHRegion { // Do not spin up clusters in here. If you need to spin up a cluster, do it // over in TestHRegionOnCluster. - private static final Log LOG = LogFactory.getLog(TestHRegion.class); + private static final Logger LOG = LoggerFactory.getLogger(TestHRegion.class); @Rule public TestName name = new TestName(); @ClassRule @@ -927,7 +927,7 @@ public class TestHRegion { // now check whether we have only one store file, the compacted one Collection sfs = region.getStore(family).getStorefiles(); for (HStoreFile sf : sfs) { - LOG.info(sf.getPath()); + LOG.info(Objects.toString(sf.getPath())); } if (!mismatchedRegionName) { assertEquals(1, region.getStore(family).getStorefilesCount()); @@ -1081,7 +1081,7 @@ public class TestHRegion { try { desc = WALEdit.getFlushDescriptor(cells.get(0)); } catch (IOException e) { - LOG.warn(e); + LOG.warn(e.toString(), e); return false; } if (desc != null) { @@ -2143,8 +2143,8 @@ public class TestHRegion { byte[] value = Bytes.toBytes("value"); Put put = new Put(row1); - put.addColumn(fam1, qual, (long) 1, value); - put.addColumn(fam1, qual, (long) 2, value); + put.addColumn(fam1, qual, 1, value); + put.addColumn(fam1, qual, 2, value); this.region = initHRegion(tableName, method, CONF, fam1); try { @@ -2591,7 +2591,7 @@ public class TestHRegion { // extract the key values out the memstore: // This is kinda hacky, but better than nothing... 
long now = System.currentTimeMillis(); - AbstractMemStore memstore = (AbstractMemStore)((HStore) region.getStore(fam1)).memstore; + AbstractMemStore memstore = (AbstractMemStore)region.getStore(fam1).memstore; Cell firstCell = memstore.getActive().first(); assertTrue(firstCell.getTimestamp() <= now); now = firstCell.getTimestamp(); @@ -3879,7 +3879,7 @@ public class TestHRegion { byte[] value = Bytes.toBytes(String.valueOf(numPutsFinished)); for (byte[] family : families) { for (byte[] qualifier : qualifiers) { - put.addColumn(family, qualifier, (long) numPutsFinished, value); + put.addColumn(family, qualifier, numPutsFinished, value); } } region.put(put); @@ -4115,7 +4115,7 @@ public class TestHRegion { region.flush(true); } // before compaction - HStore store = (HStore) region.getStore(fam1); + HStore store = region.getStore(fam1); Collection storeFiles = store.getStorefiles(); for (HStoreFile storefile : storeFiles) { StoreFileReader reader = storefile.getReader(); @@ -4208,7 +4208,7 @@ public class TestHRegion { byte col[] = Bytes.toBytes("col1"); Put put = new Put(row); - put.addColumn(familyName, col, (long) 1, Bytes.toBytes("SomeRandomValue")); + put.addColumn(familyName, col, 1, Bytes.toBytes("SomeRandomValue")); region.put(put); region.flush(true); @@ -4255,8 +4255,8 @@ public class TestHRegion { byte col[] = Bytes.toBytes("col1"); Put put = new Put(row); - put.addColumn(fam1, col, (long) 1, Bytes.toBytes("test1")); - put.addColumn(fam2, col, (long) 1, Bytes.toBytes("test2")); + put.addColumn(fam1, col, 1, Bytes.toBytes("test1")); + put.addColumn(fam2, col, 1, Bytes.toBytes("test2")); ht.put(put); HRegion firstRegion = htu.getHBaseCluster().getRegions(tableName).get(0); @@ -6430,7 +6430,7 @@ public class TestHRegion { byte[] value = Bytes.toBytes(String.valueOf(count)); for (byte[] family : families) { for (byte[] qualifier : qualifiers) { - put.addColumn(family, qualifier, (long) count, value); + put.addColumn(family, qualifier, count, value); } } try { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionFileSystem.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionFileSystem.java index 829b4884b55..0514005b603 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionFileSystem.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionFileSystem.java @@ -30,8 +30,6 @@ import java.net.URI; import java.util.Collection; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; @@ -57,11 +55,13 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({RegionServerTests.class, SmallTests.class}) public class TestHRegionFileSystem { private static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); - private static final Log LOG = LogFactory.getLog(TestHRegionFileSystem.class); + private static final Logger LOG = LoggerFactory.getLogger(TestHRegionFileSystem.class); public static final byte[] FAMILY_NAME = Bytes.toBytes("info"); private static final byte[][] FAMILIES = { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionOnCluster.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionOnCluster.java index 41ad68bf5b2..9b89768dbfb 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionOnCluster.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionOnCluster.java @@ -25,8 +25,6 @@ import java.io.IOException; import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HRegionInfo; @@ -48,6 +46,8 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** @@ -58,7 +58,7 @@ import org.junit.rules.TestName; @Category({RegionServerTests.class, MediumTests.class}) public class TestHRegionOnCluster { - private static final Log LOG = LogFactory.getLog(TestHRegionOnCluster.class); + private static final Logger LOG = LoggerFactory.getLogger(TestHRegionOnCluster.class); private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); @Rule diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java index ab5922a5423..5f506cac56d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java @@ -40,11 +40,10 @@ import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.Random; import java.util.UUID; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.Path; @@ -78,7 +77,6 @@ import org.apache.hadoop.hbase.wal.AbstractFSWALProvider; import org.apache.hadoop.hbase.wal.WAL; import org.apache.hadoop.hbase.wal.WALEdit; import org.apache.hadoop.hbase.wal.WALFactory; -import org.apache.hadoop.hbase.wal.WALKey; import org.apache.hadoop.hbase.wal.WALKeyImpl; import org.apache.hadoop.hbase.wal.WALSplitter.MutationReplay; import org.apache.hadoop.util.StringUtils; @@ -89,6 +87,8 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; import org.mockito.Mockito; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; @@ -109,7 +109,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescript @Category(MediumTests.class) public class TestHRegionReplayEvents { - private static final Log LOG = LogFactory.getLog(TestHRegion.class); + private static final Logger LOG = LoggerFactory.getLogger(TestHRegion.class); @Rule public TestName name = new TestName(); private static HBaseTestingUtility TEST_UTIL; @@ -1208,15 +1208,13 @@ public class TestHRegionReplayEvents { @Test public void testWriteFlushRequestMarker() throws IOException { // primary region is empty at this point. 
Request a flush with writeFlushRequestWalMarker=false - FlushResultImpl result = (FlushResultImpl) ((HRegion) primaryRegion).flushcache(true, false, - FlushLifeCycleTracker.DUMMY); + FlushResultImpl result = primaryRegion.flushcache(true, false, FlushLifeCycleTracker.DUMMY); assertNotNull(result); assertEquals(result.result, FlushResultImpl.Result.CANNOT_FLUSH_MEMSTORE_EMPTY); assertFalse(result.wroteFlushWalMarker); // request flush again, but this time with writeFlushRequestWalMarker = true - result = (FlushResultImpl) ((HRegion) primaryRegion).flushcache(true, true, - FlushLifeCycleTracker.DUMMY); + result = primaryRegion.flushcache(true, true, FlushLifeCycleTracker.DUMMY); assertNotNull(result); assertEquals(result.result, FlushResultImpl.Result.CANNOT_FLUSH_MEMSTORE_EMPTY); assertTrue(result.wroteFlushWalMarker); @@ -1294,7 +1292,7 @@ public class TestHRegionReplayEvents { reader = createWALReaderForPrimary(); while (true) { WAL.Entry entry = reader.next(); - LOG.info(entry); + LOG.info(Objects.toString(entry)); if (entry == null) { break; } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoad.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoad.java index 723b5706dfa..831d46bece7 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoad.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoad.java @@ -30,8 +30,6 @@ import java.util.Map; import java.util.Optional; import java.util.concurrent.atomic.AtomicLong; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -83,7 +81,8 @@ import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import org.junit.runners.Parameterized.Parameters; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter; import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos; @@ -96,7 +95,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegi @RunWith(Parameterized.class) @Category({RegionServerTests.class, LargeTests.class}) public class TestHRegionServerBulkLoad { - private static final Log LOG = LogFactory.getLog(TestHRegionServerBulkLoad.class); + private static final Logger LOG = LoggerFactory.getLogger(TestHRegionServerBulkLoad.class); protected static HBaseTestingUtility UTIL = new HBaseTestingUtility(); protected final static Configuration conf = UTIL.getConfiguration(); protected final static byte[] QUAL = Bytes.toBytes("qual"); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoadWithOldClient.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoadWithOldClient.java index da4b740de81..7d652aa6fa5 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoadWithOldClient.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoadWithOldClient.java @@ -22,8 +22,6 @@ import java.util.ArrayList; import java.util.List; import java.util.concurrent.atomic.AtomicLong; -import org.apache.commons.logging.Log; -import 
org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HConstants; @@ -46,7 +44,8 @@ import org.apache.hadoop.hbase.util.Pair; import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; /** @@ -56,12 +55,13 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; @RunWith(Parameterized.class) @Category({RegionServerTests.class, LargeTests.class}) public class TestHRegionServerBulkLoadWithOldClient extends TestHRegionServerBulkLoad { + private static final Logger LOG = + LoggerFactory.getLogger(TestHRegionServerBulkLoadWithOldClient.class); + public TestHRegionServerBulkLoadWithOldClient(int duration) { super(duration); } - private static final Log LOG = LogFactory.getLog(TestHRegionServerBulkLoadWithOldClient.class); - public static class AtomicHFileLoader extends RepeatingTestThread { final AtomicLong numBulkLoads = new AtomicLong(); final AtomicLong numCompactions = new AtomicLong(); @@ -73,6 +73,7 @@ public class TestHRegionServerBulkLoadWithOldClient extends TestHRegionServerBul this.tableName = tableName; } + @Override public void doAnAction() throws Exception { long iteration = numBulkLoads.getAndIncrement(); Path dir = UTIL.getDataTestDirOnTestFS(String.format("bulkLoad_%08d", @@ -134,6 +135,7 @@ public class TestHRegionServerBulkLoadWithOldClient extends TestHRegionServerBul } } + @Override void runAtomicBulkloadTest(TableName tableName, int millisToRun, int numScanners) throws Exception { setupTable(tableName, 10); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionWithInMemoryFlush.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionWithInMemoryFlush.java index 5d432c9d371..ac10a35292b 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionWithInMemoryFlush.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionWithInMemoryFlush.java @@ -19,8 +19,7 @@ package org.apache.hadoop.hbase.regionserver; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; + import org.apache.hadoop.hbase.CategoryBasedTimeout; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.TableName; @@ -31,6 +30,8 @@ import org.apache.hadoop.hbase.wal.WAL; import org.junit.ClassRule; import org.junit.experimental.categories.Category; import org.junit.rules.TestRule; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A test similar to TestHRegion, but with in-memory flush families. @@ -41,7 +42,7 @@ import org.junit.rules.TestRule; public class TestHRegionWithInMemoryFlush extends TestHRegion{ // Do not spin up clusters in here. If you need to spin up a cluster, do it // over in TestHRegionOnCluster. 
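Besides the logging swap, the hunks in this stretch fold in small cleanups: @Override is added to overriding test methods (doWork, doAnAction, the memstore compaction tests), and redundant casts such as ((HRegion) region) or (long) on an int literal are dropped where the declared type already suffices. A compile-only sketch with hypothetical stand-in types:

// Hypothetical stand-ins; only the shape of the cleanup is of interest here.
abstract class WorkerSketch {
  public abstract void doWork() throws Exception;
}

public class CleanupSketch extends WorkerSketch {
  private final AutoCloseable region;   // stand-in for the HRegion field

  CleanupSketch(AutoCloseable region) {
    this.region = region;
  }

  @Override                             // annotation now added on overrides
  public void doWork() throws Exception {
    region.close();                     // was ((HRegion) region).close(); the cast was redundant
  }
}
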
- private static final Log LOG = LogFactory.getLog(TestHRegionWithInMemoryFlush.class); + private static final Logger LOG = LoggerFactory.getLogger(TestHRegionWithInMemoryFlush.class); @ClassRule public static final TestRule timeout = CategoryBasedTimeout.forClass(TestHRegionWithInMemoryFlush.class); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStore.java index 61958483921..2745d3d7877 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStore.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStore.java @@ -48,8 +48,6 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileStatus; @@ -112,7 +110,8 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; import org.mockito.Mockito; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; /** @@ -120,7 +119,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; */ @Category({ RegionServerTests.class, MediumTests.class }) public class TestHStore { - private static final Log LOG = LogFactory.getLog(TestHStore.class); + private static final Logger LOG = LoggerFactory.getLogger(TestHStore.class); @Rule public TestName name = new TestName(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java index 4715e9570f1..c9affaffc20 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java @@ -30,8 +30,6 @@ import java.util.OptionalLong; import java.util.TreeSet; import java.util.concurrent.atomic.AtomicInteger; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -70,7 +68,8 @@ import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; import org.mockito.Mockito; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.base.Joiner; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterables; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; @@ -83,7 +82,7 @@ import static org.mockito.Mockito.when; */ @Category({RegionServerTests.class, SmallTests.class}) public class TestHStoreFile extends HBaseTestCase { - private static final Log LOG = LogFactory.getLog(TestHStoreFile.class); + private static final Logger LOG = LoggerFactory.getLogger(TestHStoreFile.class); private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private CacheConfig cacheConf = new CacheConfig(TEST_UTIL.getConfiguration()); private static String ROOT_DIR = TEST_UTIL.getDataTestDir("TestStoreFile").toString(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestJoinedScanners.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestJoinedScanners.java index 60ed50b49ae..d218389c899 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestJoinedScanners.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestJoinedScanners.java @@ -29,8 +29,6 @@ import org.apache.commons.cli.GnuParser; import org.apache.commons.cli.HelpFormatter; import org.apache.commons.cli.Option; import org.apache.commons.cli.Options; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HTableDescriptor; @@ -51,6 +49,8 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Test performance improvement of joined scanners optimization: @@ -58,7 +58,7 @@ import org.junit.rules.TestName; */ @Category({RegionServerTests.class, LargeTests.class}) public class TestJoinedScanners { - private static final Log LOG = LogFactory.getLog(TestJoinedScanners.class); + private static final Logger LOG = LoggerFactory.getLogger(TestJoinedScanners.class); private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private static final String DIR = TEST_UTIL.getDataTestDir("TestJoinedScanners").toString(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMajorCompaction.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMajorCompaction.java index 40f513562df..2684bdf90ce 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMajorCompaction.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMajorCompaction.java @@ -35,8 +35,6 @@ import java.util.List; import java.util.Map; import java.util.Map.Entry; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; @@ -69,6 +67,8 @@ import org.junit.experimental.categories.Category; import org.junit.rules.TestName; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Test major compactions @@ -81,7 +81,7 @@ public class TestMajorCompaction { return new Object[] { "NONE", "BASIC", "EAGER" }; } @Rule public TestName name; - private static final Log LOG = LogFactory.getLog(TestMajorCompaction.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(TestMajorCompaction.class.getName()); private static final HBaseTestingUtility UTIL = HBaseTestingUtility.createLocalHTU(); protected Configuration conf = UTIL.getConfiguration(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMasterAddressTracker.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMasterAddressTracker.java index f1b4441a5f7..e4b37541ae3 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMasterAddressTracker.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMasterAddressTracker.java @@ -25,8 +25,6 @@ import static org.junit.Assert.assertTrue; import java.util.concurrent.Semaphore; -import org.apache.commons.logging.Log; -import 
org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.*; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.testclassification.RegionServerTests; @@ -39,11 +37,13 @@ import org.junit.BeforeClass; import org.junit.Test; import org.junit.Rule; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.junit.experimental.categories.Category; @Category({RegionServerTests.class, MediumTests.class}) public class TestMasterAddressTracker { - private static final Log LOG = LogFactory.getLog(TestMasterAddressTracker.class); + private static final Logger LOG = LoggerFactory.getLogger(TestMasterAddressTracker.class); private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); @@ -157,7 +157,7 @@ public class TestMasterAddressTracker { } public static class NodeCreationListener extends ZKListener { - private static final Log LOG = LogFactory.getLog(NodeCreationListener.class); + private static final Logger LOG = LoggerFactory.getLogger(NodeCreationListener.class); private Semaphore lock; private String node; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMinorCompaction.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMinorCompaction.java index a7b5cd5a005..98b0761a62b 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMinorCompaction.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMinorCompaction.java @@ -24,8 +24,6 @@ import static org.apache.hadoop.hbase.HBaseTestingUtility.fam2; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseTestCase; import org.apache.hadoop.hbase.HBaseTestingUtility; @@ -45,6 +43,8 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Test minor compactions @@ -52,7 +52,7 @@ import org.junit.rules.TestName; @Category({RegionServerTests.class, MediumTests.class}) public class TestMinorCompaction { @Rule public TestName name = new TestName(); - private static final Log LOG = LogFactory.getLog(TestMinorCompaction.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(TestMinorCompaction.class.getName()); private static final HBaseTestingUtility UTIL = HBaseTestingUtility.createLocalHTU(); protected Configuration conf = UTIL.getConfiguration(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMobStoreCompaction.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMobStoreCompaction.java index 3706f4e6bd1..c6a300fe119 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMobStoreCompaction.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMobStoreCompaction.java @@ -33,8 +33,6 @@ import java.util.Map; import java.util.Random; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -73,6 +71,8 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; 
import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Test mob store compaction @@ -81,7 +81,7 @@ import org.junit.rules.TestName; public class TestMobStoreCompaction { @Rule public TestName name = new TestName(); - static final Log LOG = LogFactory.getLog(TestMobStoreCompaction.class.getName()); + static final Logger LOG = LoggerFactory.getLogger(TestMobStoreCompaction.class.getName()); private final static HBaseTestingUtility UTIL = new HBaseTestingUtility(); private Configuration conf = null; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScanner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScanner.java index bd7639ecd50..19685ea395e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScanner.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScanner.java @@ -35,8 +35,6 @@ import java.util.Set; import java.util.TreeSet; import org.apache.commons.lang3.ArrayUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; @@ -58,6 +56,8 @@ import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import org.junit.runners.Parameterized.Parameters; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Tests optimized scanning of multiple columns. @@ -66,7 +66,7 @@ import org.junit.runners.Parameterized.Parameters; @Category({RegionServerTests.class, MediumTests.class}) public class TestMultiColumnScanner { - private static final Log LOG = LogFactory.getLog(TestMultiColumnScanner.class); + private static final Logger LOG = LoggerFactory.getLogger(TestMultiColumnScanner.class); private static final String TABLE_NAME = TestMultiColumnScanner.class.getSimpleName(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestParallelPut.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestParallelPut.java index 6bbb81dda0b..ec161dc37e7 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestParallelPut.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestParallelPut.java @@ -24,8 +24,6 @@ import static org.junit.Assert.assertTrue; import java.io.IOException; import java.util.Random; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HBaseTestingUtility; @@ -49,6 +47,8 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** @@ -57,7 +57,7 @@ import org.junit.rules.TestName; */ @Category({RegionServerTests.class, MediumTests.class}) public class TestParallelPut { - private static final Log LOG = LogFactory.getLog(TestParallelPut.class); + private static final Logger LOG = LoggerFactory.getLogger(TestParallelPut.class); @Rule public TestName name = new TestName(); private HRegion region = null; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestPerColumnFamilyFlush.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestPerColumnFamilyFlush.java index b8155e45713..cf1c104105e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestPerColumnFamilyFlush.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestPerColumnFamilyFlush.java @@ -17,9 +17,16 @@ */ package org.apache.hadoop.hbase.regionserver; -import org.apache.hadoop.hbase.shaded.com.google.common.hash.Hashing; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +import java.io.IOException; +import java.util.Arrays; +import java.util.List; +import java.util.Random; + import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HBaseConfiguration; @@ -48,23 +55,17 @@ import org.apache.hadoop.hbase.wal.AbstractFSWALProvider; import org.apache.hadoop.hbase.wal.WAL; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; -import java.io.IOException; -import java.util.Arrays; -import java.util.List; -import java.util.Random; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; +import org.apache.hadoop.hbase.shaded.com.google.common.hash.Hashing; /** * This test verifies the correctness of the Per Column Family flushing strategy */ @Category({ RegionServerTests.class, LargeTests.class }) public class TestPerColumnFamilyFlush { - private static final Log LOG = LogFactory.getLog(TestPerColumnFamilyFlush.class); + private static final Logger LOG = LoggerFactory.getLogger(TestPerColumnFamilyFlush.class); private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRSKilledWhenInitializing.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRSKilledWhenInitializing.java index 1b2574e7a5a..b4d04fcd8d0 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRSKilledWhenInitializing.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRSKilledWhenInitializing.java @@ -27,11 +27,8 @@ import java.util.Map; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.CategoryBasedTimeout; -import org.apache.hadoop.hbase.CoordinatedStateManager; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.LocalHBaseCluster; @@ -53,7 +50,8 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; import org.junit.rules.TestRule; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse; /** @@ -62,7 +60,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProto */ @Category({RegionServerTests.class, MediumTests.class}) @Ignore("See HBASE-19515") public 
class TestRSKilledWhenInitializing { - private static final Log LOG = LogFactory.getLog(TestRSKilledWhenInitializing.class); + private static final Logger LOG = LoggerFactory.getLogger(TestRSKilledWhenInitializing.class); @Rule public TestName testName = new TestName(); @Rule public final TestRule timeout = CategoryBasedTimeout.builder(). withTimeout(this.getClass()).withLookingForStuckThread(true).build(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRSStatusServlet.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRSStatusServlet.java index c9cae7ee434..412018f405f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRSStatusServlet.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRSStatusServlet.java @@ -22,8 +22,6 @@ import java.io.IOException; import java.io.StringWriter; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HTableDescriptor; @@ -47,7 +45,8 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; import org.mockito.Mockito; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController; import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; @@ -61,7 +60,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetServerIn */ @Category({RegionServerTests.class, SmallTests.class}) public class TestRSStatusServlet { - private static final Log LOG = LogFactory.getLog(TestRSStatusServlet.class); + private static final Logger LOG = LoggerFactory.getLogger(TestRSStatusServlet.class); private HRegionServer rs; private RSRpcServices rpcServices; private RpcServerInterface rpcServer; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRecoveredEdits.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRecoveredEdits.java index 2c9a4375edd..97df08afb63 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRecoveredEdits.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRecoveredEdits.java @@ -23,8 +23,6 @@ import static org.junit.Assert.assertTrue; import java.io.IOException; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -54,6 +52,8 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Tests around replay of recovered.edits content. 
@@ -61,7 +61,7 @@ import org.junit.rules.TestName; @Category({MediumTests.class}) public class TestRecoveredEdits { private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); - private static final Log LOG = LogFactory.getLog(TestRecoveredEdits.class); + private static final Logger LOG = LoggerFactory.getLogger(TestRecoveredEdits.class); @Rule public TestName testName = new TestName(); /** diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionIncrement.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionIncrement.java index 394b8a2e6aa..d913c67815d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionIncrement.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionIncrement.java @@ -24,8 +24,6 @@ import java.util.ArrayList; import java.util.List; import java.util.concurrent.ThreadLocalRandom; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.hbase.CategoryBasedTimeout; @@ -47,6 +45,8 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; import org.junit.rules.TestRule; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** @@ -60,7 +60,7 @@ import org.junit.rules.TestRule; */ @Category(MediumTests.class) public class TestRegionIncrement { - private static final Log LOG = LogFactory.getLog(TestRegionIncrement.class); + private static final Logger LOG = LoggerFactory.getLogger(TestRegionIncrement.class); @Rule public TestName name = new TestName(); @Rule public final TestRule timeout = CategoryBasedTimeout.builder().withTimeout(this.getClass()). 
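The hunks above all repeat the same mechanical conversion: the commons-logging Log/LogFactory pair becomes the slf4j Logger/LoggerFactory pair, and call sites that passed a non-String argument (commons-logging accepted any Object) are wrapped, e.g. LOG.info(Objects.toString(entry)), because the slf4j logging methods are declared to take a String message. A minimal sketch of the target shape follows; ExampleTest and its messages are placeholders, not code from this patch.

    import java.util.Objects;

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class ExampleTest {
      // slf4j replacement for: private static final Log LOG = LogFactory.getLog(ExampleTest.class);
      private static final Logger LOG = LoggerFactory.getLogger(ExampleTest.class);

      void logEntry(Object entry) {
        // commons-logging allowed LOG.info(entry); slf4j needs a String...
        LOG.info(Objects.toString(entry));
        // ...or a parameterized message, which also skips building the String
        // entirely when INFO is disabled.
        LOG.info("read entry {}", entry);
      }
    }

Both LoggerFactory.getLogger(Class) and LoggerFactory.getLogger(String) exist, which is why hunks such as TestMajorCompaction can keep passing TestMajorCompaction.class.getName() unchanged.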
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransactionOnCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransactionOnCluster.java index ede9764fd8c..0fba4e13633 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransactionOnCluster.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransactionOnCluster.java @@ -26,13 +26,12 @@ import static org.junit.Assert.fail; import java.io.IOException; import java.util.ArrayList; import java.util.List; +import java.util.Objects; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import org.apache.commons.lang3.RandomUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -77,7 +76,8 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; import org.junit.rules.TestRule; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.base.Joiner; import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController; import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; @@ -87,7 +87,9 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProto @Category({RegionServerTests.class, LargeTests.class}) public class TestRegionMergeTransactionOnCluster { - private static final Log LOG = LogFactory.getLog(TestRegionMergeTransactionOnCluster.class); + private static final Logger LOG = + LoggerFactory.getLogger(TestRegionMergeTransactionOnCluster.class); + @Rule public TestName name = new TestName(); @Rule public final TestRule timeout = CategoryBasedTimeout.builder(). withTimeout(this.getClass()). 
@@ -426,8 +428,8 @@ public class TestRegionMergeTransactionOnCluster { MetaTableAccessor.getTableRegionsAndLocations(TEST_UTIL.getConnection(), tablename); tableRegionsInMaster = master.getAssignmentManager().getRegionStates().getRegionsOfTable(tablename); - LOG.info(tableRegionsInMaster); - LOG.info(tableRegionsInMeta); + LOG.info(Objects.toString(tableRegionsInMaster)); + LOG.info(Objects.toString(tableRegionsInMeta)); int tableRegionsInMetaSize = tableRegionsInMeta.size(); int tableRegionsInMasterSize = tableRegionsInMaster.size(); if (tableRegionsInMetaSize == expectedRegionNum diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionOpen.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionOpen.java index 1f8d16be1ad..fb3607a1620 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionOpen.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionOpen.java @@ -26,8 +26,6 @@ import java.io.IOException; import java.util.List; import java.util.concurrent.ThreadPoolExecutor; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -51,12 +49,15 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import static org.junit.Assert.fail; @Category({MediumTests.class, RegionServerTests.class}) public class TestRegionOpen { @SuppressWarnings("unused") - private static final Log LOG = LogFactory.getLog(TestRegionOpen.class); + private static final Logger LOG = LoggerFactory.getLogger(TestRegionOpen.class); private static final int NB_SERVERS = 1; private static final HBaseTestingUtility HTU = new HBaseTestingUtility(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicaFailover.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicaFailover.java index 7a6e2fb8a5a..3889e55a272 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicaFailover.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicaFailover.java @@ -26,9 +26,6 @@ import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.commons.logging.impl.Log4JLogger; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; @@ -40,7 +37,6 @@ import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; import org.apache.hadoop.hbase.client.Consistency; import org.apache.hadoop.hbase.client.Get; -import org.apache.hadoop.hbase.client.RpcRetryingCallerImpl; import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.replication.regionserver.TestRegionReplicaReplicationEndpoint; import org.apache.hadoop.hbase.testclassification.LargeTests; @@ -48,25 +44,22 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil; import org.apache.hadoop.hbase.util.Threads; import 
org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread; -import org.apache.log4j.Level; import org.junit.After; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Tests failover of secondary region replicas. */ @Category(LargeTests.class) public class TestRegionReplicaFailover { - - private static final Log LOG = LogFactory.getLog(TestRegionReplicaReplicationEndpoint.class); - - static { - ((Log4JLogger)RpcRetryingCallerImpl.LOG).getLogger().setLevel(Level.ALL); - } + private static final Logger LOG = + LoggerFactory.getLogger(TestRegionReplicaReplicationEndpoint.class); private static final HBaseTestingUtility HTU = new HBaseTestingUtility(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicas.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicas.java index 41bf11e249e..276eee9c693 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicas.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicas.java @@ -30,8 +30,6 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; @@ -57,7 +55,8 @@ import org.junit.Assert; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter; import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos; @@ -68,7 +67,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos; */ @Category({RegionServerTests.class, MediumTests.class}) public class TestRegionReplicas { - private static final Log LOG = LogFactory.getLog(TestRegionReplicas.class); + private static final Logger LOG = LoggerFactory.getLogger(TestRegionReplicas.class); private static final int NB_SERVERS = 1; private static Table table; @@ -251,7 +250,7 @@ public class TestRegionReplicas { LOG.info("Flushing primary region"); HRegion region = getRS().getRegionByEncodedName(hriPrimary.getEncodedName()); region.flush(true); - HRegion primaryRegion = (HRegion) region; + HRegion primaryRegion = region; // ensure that chore is run LOG.info("Sleeping for " + (4 * refreshPeriod)); @@ -347,7 +346,7 @@ public class TestRegionReplicas { if (key == endKey) key = startKey; } } catch (Exception ex) { - LOG.warn(ex); + LOG.warn(ex.toString(), ex); exceptions[0].compareAndSet(null, ex); } } @@ -367,7 +366,7 @@ public class TestRegionReplicas { } } } catch (Exception ex) { - LOG.warn(ex); + LOG.warn(ex.toString(), ex); exceptions[1].compareAndSet(null, ex); } } @@ -477,7 +476,7 @@ public class TestRegionReplicas { int sum = 0; for (HStoreFile sf : ((HStore) secondaryRegion.getStore(f)).getStorefiles()) { // Our file does not exist anymore. was moved by the compaction above. 
- LOG.debug(getRS().getFileSystem().exists(sf.getPath())); + LOG.debug(Boolean.toString(getRS().getFileSystem().exists(sf.getPath()))); Assert.assertFalse(getRS().getFileSystem().exists(sf.getPath())); HFileScanner scanner = sf.getReader().getScanner(false, false); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicasAreDistributed.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicasAreDistributed.java index d6b7e6f424f..22e9dd4d678 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicasAreDistributed.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicasAreDistributed.java @@ -25,8 +25,6 @@ import java.util.Collection; import java.util.HashMap; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; @@ -43,11 +41,13 @@ import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({ RegionServerTests.class, MediumTests.class }) public class TestRegionReplicasAreDistributed { - private static final Log LOG = LogFactory.getLog(TestRegionReplicasAreDistributed.class); + private static final Logger LOG = LoggerFactory.getLogger(TestRegionReplicasAreDistributed.class); private static final int NB_SERVERS = 3; private static Table table; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicasWithModifyTable.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicasWithModifyTable.java index 45536f2b0bf..d1bf7736554 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicasWithModifyTable.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicasWithModifyTable.java @@ -22,8 +22,6 @@ import static org.junit.Assert.assertEquals; import java.io.IOException; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; @@ -43,9 +41,6 @@ import org.junit.rules.TestName; @Category({ RegionServerTests.class, MediumTests.class }) public class TestRegionReplicasWithModifyTable { - - private static final Log LOG = LogFactory.getLog(TestRegionReplicasWithModifyTable.class); - private static final int NB_SERVERS = 3; private static Table table; private static final byte[] row = "TestRegionReplicasWithModifyTable".getBytes(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicasWithRestartScenarios.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicasWithRestartScenarios.java index 1646a66a04a..dd093596423 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicasWithRestartScenarios.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicasWithRestartScenarios.java @@ -25,8 +25,6 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Collection; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; 
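The TestRegionReplicas changes just above show the other recurring call-site fix: commons-logging accepted a bare Throwable or a boolean as the message object, whereas the slf4j methods take a String message plus an optional Throwable, hence LOG.warn(ex.toString(), ex) and LOG.debug(Boolean.toString(...)). A short sketch of the equivalent parameterized calls, using hypothetical names and assuming slf4j 1.6+ (where a trailing Throwable after the format arguments is logged with its stack trace):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class ReplicaScanExample {
      private static final Logger LOG = LoggerFactory.getLogger(ReplicaScanExample.class);

      void scan(String key) {
        try {
          Integer.parseInt(key); // stand-in for work that may fail
        } catch (Exception ex) {
          // String message plus Throwable keeps the stack trace, matching the
          // LOG.warn(ex.toString(), ex) pattern used throughout this patch.
          LOG.warn("scan failed for key {}", key, ex);
        }
        boolean exists = false;
        // Primitives become format arguments instead of Boolean.toString(...).
        LOG.debug("file exists: {}", exists);
      }
    }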
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.CategoryBasedTimeout; import org.apache.hadoop.hbase.HBaseTestingUtility; @@ -50,10 +48,14 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; import org.junit.rules.TestRule; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({RegionServerTests.class, MediumTests.class}) public class TestRegionReplicasWithRestartScenarios { - private static final Log LOG = LogFactory.getLog(TestRegionReplicasWithRestartScenarios.class); + private static final Logger LOG = + LoggerFactory.getLogger(TestRegionReplicasWithRestartScenarios.class); + @Rule public TestName name = new TestName(); @Rule public final TestRule timeout = CategoryBasedTimeout.builder(). withTimeout(this.getClass()). diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerAbort.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerAbort.java index 39339aa2ef6..301f509de68 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerAbort.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerAbort.java @@ -24,8 +24,6 @@ import static org.junit.Assert.assertTrue; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -62,6 +60,8 @@ import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.io.IOException; import java.util.Optional; @@ -77,7 +77,7 @@ import static org.junit.Assert.assertTrue; public class TestRegionServerAbort { private static final byte[] FAMILY_BYTES = Bytes.toBytes("f"); - private static final Log LOG = LogFactory.getLog(TestRegionServerAbort.class); + private static final Logger LOG = LoggerFactory.getLogger(TestRegionServerAbort.class); private HBaseTestingUtility testUtil; private Configuration conf; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerHostname.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerHostname.java index abcc4970573..e6f3a24ee6f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerHostname.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerHostname.java @@ -27,8 +27,6 @@ import java.util.Enumeration; import java.util.List; import java.util.Locale; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseTestingUtility; @@ -41,13 +39,15 @@ import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Tests for the hostname specification by region server */ @Category({RegionServerTests.class, MediumTests.class}) public class TestRegionServerHostname { - private static final Log LOG = LogFactory.getLog(TestRegionServerHostname.class); + private static final Logger LOG = LoggerFactory.getLogger(TestRegionServerHostname.class); private HBaseTestingUtility 
TEST_UTIL; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java index 6b2d4a1fbec..15e35a54573 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java @@ -25,8 +25,6 @@ import java.io.IOException; import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.CategoryBasedTimeout; import org.apache.hadoop.hbase.CompatibilityFactory; @@ -61,8 +59,6 @@ import org.apache.hadoop.hbase.testclassification.LargeTests; import org.apache.hadoop.hbase.testclassification.RegionServerTests; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Threads; -import org.apache.log4j.Level; -import org.apache.log4j.Logger; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; @@ -74,10 +70,12 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; import org.junit.rules.TestRule; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({RegionServerTests.class, LargeTests.class}) public class TestRegionServerMetrics { - private static final Log LOG = LogFactory.getLog(TestRegionServerMetrics.class); + private static final Logger LOG = LoggerFactory.getLogger(TestRegionServerMetrics.class); @Rule public TestName testName = new TestName(); @@ -85,10 +83,6 @@ public class TestRegionServerMetrics { @ClassRule public static TestRule timeout = CategoryBasedTimeout.forClass(TestRegionServerMetrics.class); - static { - Logger.getLogger("org.apache.hadoop.hbase").setLevel(Level.DEBUG); - } - private static MetricsAssertHelper metricsHelper; private static MiniHBaseCluster cluster; private static HRegionServer rs; @@ -536,7 +530,7 @@ public class TestRegionServerMetrics { setMobThreshold(region, cf, 0); // closing the region forces the compaction.discharger to archive the compacted hfiles - ((HRegion) region).close(); + region.close(); // metrics are reset by the region initialization region.initialize(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerNoMaster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerNoMaster.java index 6e20612b7e1..6c988834742 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerNoMaster.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerNoMaster.java @@ -21,8 +21,6 @@ package org.apache.hadoop.hbase.regionserver; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HRegionInfo; @@ -50,6 +48,8 @@ import org.junit.Assert; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** @@ -58,7 +58,7 @@ import org.junit.experimental.categories.Category; @Category({RegionServerTests.class, MediumTests.class}) public class TestRegionServerNoMaster { - private static final Log LOG = 
LogFactory.getLog(TestRegionServerNoMaster.class); + private static final Logger LOG = LoggerFactory.getLogger(TestRegionServerNoMaster.class); private static final int NB_SERVERS = 1; private static Table table; private static final byte[] row = "ee".getBytes(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerOnlineConfigChange.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerOnlineConfigChange.java index 978c7929843..13da541bcdd 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerOnlineConfigChange.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerOnlineConfigChange.java @@ -22,8 +22,6 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; @@ -38,6 +36,8 @@ import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.io.IOException; @@ -49,8 +49,8 @@ import java.io.IOException; @Category({MediumTests.class}) public class TestRegionServerOnlineConfigChange { - private static final Log LOG = - LogFactory.getLog(TestRegionServerOnlineConfigChange.class.getName()); + private static final Logger LOG = + LoggerFactory.getLogger(TestRegionServerOnlineConfigChange.class.getName()); private static HBaseTestingUtility hbaseTestingUtility = new HBaseTestingUtility(); private static Configuration conf = null; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerReadRequestMetrics.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerReadRequestMetrics.java index e554d0dff53..4ff96557d60 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerReadRequestMetrics.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerReadRequestMetrics.java @@ -18,8 +18,6 @@ */ package org.apache.hadoop.hbase.regionserver; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.ClusterStatus.Option; import org.apache.hadoop.hbase.HBaseTestingUtility; @@ -56,6 +54,8 @@ import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.io.IOException; import java.util.Collection; @@ -71,7 +71,8 @@ import static org.junit.Assert.fail; @Category(MediumTests.class) public class TestRegionServerReadRequestMetrics { - private static final Log LOG = LogFactory.getLog(TestRegionServerReadRequestMetrics.class); + private static final Logger LOG = + LoggerFactory.getLogger(TestRegionServerReadRequestMetrics.class); private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private static final TableName TABLE_NAME = TableName.valueOf("test"); private static final byte[] CF1 = "c1".getBytes(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerReportForDuty.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerReportForDuty.java index f32a87c72a9..b72f4863558 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerReportForDuty.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerReportForDuty.java @@ -22,8 +22,6 @@ import static org.junit.Assert.assertTrue; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.CoordinatedStateManager; import org.apache.hadoop.hbase.HBaseTestingUtility; @@ -41,11 +39,13 @@ import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category(MediumTests.class) public class TestRegionServerReportForDuty { - private static final Log LOG = LogFactory.getLog(TestRegionServerReportForDuty.class); + private static final Logger LOG = LoggerFactory.getLogger(TestRegionServerReportForDuty.class); private static final long SLEEP_INTERVAL = 500; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestReversibleScanners.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestReversibleScanners.java index a061a8793e4..99296f4835e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestReversibleScanners.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestReversibleScanners.java @@ -29,8 +29,6 @@ import java.util.List; import java.util.Map; import java.util.NavigableSet; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; @@ -65,7 +63,8 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; /** @@ -73,7 +72,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; */ @Category({RegionServerTests.class, MediumTests.class}) public class TestReversibleScanners { - private static final Log LOG = LogFactory.getLog(TestReversibleScanners.class); + private static final Logger LOG = LoggerFactory.getLogger(TestReversibleScanners.class); HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private static byte[] FAMILYNAME = Bytes.toBytes("testCf"); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanWithBloomError.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanWithBloomError.java index 5c716f75bab..00e573ff554 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanWithBloomError.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanWithBloomError.java @@ -31,8 +31,6 @@ import java.util.List; import java.util.NavigableSet; import java.util.TreeSet; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -56,6 +54,8 @@ import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; 
import org.junit.runners.Parameterized.Parameters; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Test a multi-column scanner when there is a Bloom filter false-positive. @@ -65,8 +65,8 @@ import org.junit.runners.Parameterized.Parameters; @Category({RegionServerTests.class, SmallTests.class}) public class TestScanWithBloomError { - private static final Log LOG = - LogFactory.getLog(TestScanWithBloomError.class); + private static final Logger LOG = + LoggerFactory.getLogger(TestScanWithBloomError.class); private static final String TABLE_NAME = "ScanWithBloomError"; private static final String FAMILY = "myCF"; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanner.java index e94e52e31e4..c6dce67edc5 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanner.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanner.java @@ -31,8 +31,6 @@ import java.io.IOException; import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.CategoryBasedTimeout; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; @@ -63,6 +61,8 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; import org.junit.rules.TestRule; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Test of a long-lived scanner validating as we go. @@ -73,7 +73,7 @@ public class TestScanner { @Rule public final TestRule timeout = CategoryBasedTimeout.builder(). withTimeout(this.getClass()).withLookingForStuckThread(true).build(); - private static final Log LOG = LogFactory.getLog(TestScanner.class); + private static final Logger LOG = LoggerFactory.getLogger(TestScanner.class); private final static HBaseTestingUtility TEST_UTIL = HBaseTestingUtility.createLocalHTU(); private static final byte [] FIRST_ROW = HConstants.EMPTY_START_ROW; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScannerHeartbeatMessages.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScannerHeartbeatMessages.java index dc32cb5c246..7429753e7fe 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScannerHeartbeatMessages.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScannerHeartbeatMessages.java @@ -27,7 +27,6 @@ import java.util.List; import java.util.concurrent.Callable; import org.apache.commons.lang3.exception.ExceptionUtils; -import org.apache.commons.logging.impl.Log4JLogger; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -37,7 +36,6 @@ import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; -import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.HTestConst; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.TableName; @@ -46,7 +44,6 @@ import org.apache.hadoop.hbase.client.RegionInfo; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; import org.apache.hadoop.hbase.client.Scan; -import org.apache.hadoop.hbase.client.ScannerCallable; import 
org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.client.TableDescriptor; import org.apache.hadoop.hbase.filter.Filter; @@ -56,7 +53,6 @@ import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Threads; import org.apache.hadoop.hbase.wal.WAL; -import org.apache.log4j.Level; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; @@ -122,8 +118,6 @@ public class TestScannerHeartbeatMessages { @BeforeClass public static void setUpBeforeClass() throws Exception { - ((Log4JLogger) ScannerCallable.LOG).getLogger().setLevel(Level.ALL); - ((Log4JLogger) HeartbeatRPCServices.LOG).getLogger().setLevel(Level.ALL); Configuration conf = TEST_UTIL.getConfiguration(); conf.setStrings(HConstants.REGION_IMPL, HeartbeatHRegion.class.getName()); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScannerRetriableFailure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScannerRetriableFailure.java index bd63babdf60..a6aea6a3dfc 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScannerRetriableFailure.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScannerRetriableFailure.java @@ -23,8 +23,6 @@ import java.util.ArrayList; import java.util.List; import java.util.Optional; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -51,13 +49,15 @@ import org.junit.BeforeClass; import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; @Category({RegionServerTests.class, LargeTests.class}) public class TestScannerRetriableFailure { - private static final Log LOG = LogFactory.getLog(TestScannerRetriableFailure.class); + private static final Logger LOG = LoggerFactory.getLogger(TestScannerRetriableFailure.class); private static final HBaseTestingUtility UTIL = new HBaseTestingUtility(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java index 50b1d62df45..287f65e6c06 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java @@ -32,8 +32,6 @@ import java.util.Map; import java.util.Random; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; @@ -56,6 +54,8 @@ import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import org.junit.runners.Parameterized.Parameters; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Test various seek optimizations for correctness and check if they are @@ -65,8 +65,8 @@ import org.junit.runners.Parameterized.Parameters; @Category({RegionServerTests.class, MediumTests.class}) public class TestSeekOptimizations { - private static final Log LOG = - 
LogFactory.getLog(TestSeekOptimizations.class); + private static final Logger LOG = + LoggerFactory.getLogger(TestSeekOptimizations.class); // Constants private static final String FAMILY = "myCF"; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitLogWorker.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitLogWorker.java index f78a19fb0fe..613282f951c 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitLogWorker.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitLogWorker.java @@ -28,10 +28,9 @@ import static org.mockito.Mockito.when; import java.io.IOException; import java.util.List; +import java.util.Objects; import java.util.concurrent.atomic.LongAdder; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.hbase.ChoreService; @@ -56,23 +55,20 @@ import org.apache.hadoop.hbase.zookeeper.ZKSplitLog; import org.apache.hadoop.hbase.zookeeper.ZKUtil; import org.apache.hadoop.hbase.zookeeper.ZKWatcher; import org.apache.hadoop.hbase.zookeeper.ZNodePaths; -import org.apache.log4j.Level; -import org.apache.log4j.Logger; import org.apache.zookeeper.CreateMode; import org.apache.zookeeper.ZooDefs.Ids; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({RegionServerTests.class, MediumTests.class}) public class TestSplitLogWorker { - private static final Log LOG = LogFactory.getLog(TestSplitLogWorker.class); + private static final Logger LOG = LoggerFactory.getLogger(TestSplitLogWorker.class); private static final int WAIT_TIME = 15000; private final ServerName MANAGER = ServerName.valueOf("manager,1,1"); - static { - Logger.getLogger("org.apache.hadoop.hbase").setLevel(Level.DEBUG); - } private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private DummyServer ds; @@ -430,7 +426,7 @@ public class TestSplitLogWorker { waitForCounter(SplitLogCounters.tot_wkr_task_acquired_rescan, 0, 1, WAIT_TIME); List nodes = ZKUtil.listChildrenNoWatch(zkw, zkw.znodePaths.splitLogZNode); - LOG.debug(nodes); + LOG.debug(Objects.toString(nodes)); int num = 0; for (String node : nodes) { num++; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransactionOnCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransactionOnCluster.java index 619ffd081f3..a86ec2f193e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransactionOnCluster.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransactionOnCluster.java @@ -34,8 +34,6 @@ import java.util.Optional; import java.util.concurrent.CountDownLatch; import java.util.concurrent.atomic.AtomicBoolean; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -101,7 +99,8 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; import org.junit.rules.TestRule; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController; import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; @@ -115,7 +114,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProto @Category({RegionServerTests.class, LargeTests.class}) @SuppressWarnings("deprecation") public class TestSplitTransactionOnCluster { - private static final Log LOG = LogFactory.getLog(TestSplitTransactionOnCluster.class); + private static final Logger LOG = LoggerFactory.getLogger(TestSplitTransactionOnCluster.class); @Rule public final TestRule timeout = CategoryBasedTimeout.builder().withTimeout(this.getClass()). withLookingForStuckThread(true).build(); private Admin admin = null; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitWalDataLoss.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitWalDataLoss.java index 4db5734a849..073845e06e8 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitWalDataLoss.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitWalDataLoss.java @@ -27,8 +27,6 @@ import java.util.Collection; import java.util.Map; import org.apache.commons.lang3.mutable.MutableBoolean; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.DroppedSnapshotException; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; @@ -56,6 +54,8 @@ import org.junit.experimental.categories.Category; import org.mockito.Matchers; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Testcase for https://issues.apache.org/jira/browse/HBASE-13811 @@ -63,7 +63,7 @@ import org.mockito.stubbing.Answer; @Category({ MediumTests.class }) public class TestSplitWalDataLoss { - private static final Log LOG = LogFactory.getLog(TestSplitWalDataLoss.class); + private static final Logger LOG = LoggerFactory.getLogger(TestSplitWalDataLoss.class); private final HBaseTestingUtility testUtil = new HBaseTestingUtility(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreScanner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreScanner.java index f4758e7f490..1f5db50b35e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreScanner.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreScanner.java @@ -37,8 +37,6 @@ import java.util.OptionalInt; import java.util.TreeSet; import java.util.concurrent.atomic.AtomicInteger; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.CategoryBasedTimeout; import org.apache.hadoop.hbase.Cell; @@ -63,11 +61,13 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; import org.junit.rules.TestRule; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; // Can't be small as it plays with EnvironmentEdgeManager @Category({RegionServerTests.class, MediumTests.class}) public class TestStoreScanner { - private static final Log LOG = LogFactory.getLog(TestStoreScanner.class); + private static final Logger LOG = LoggerFactory.getLogger(TestStoreScanner.class); @Rule public TestName name = new TestName(); @Rule public final TestRule timeout = 
CategoryBasedTimeout.builder().withTimeout(this.getClass()). withLookingForStuckThread(true).build(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWALLockup.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWALLockup.java index 5278f3f7ac4..62d22d201ba 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWALLockup.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWALLockup.java @@ -26,8 +26,6 @@ import java.util.TreeMap; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -66,6 +64,8 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; import org.mockito.Mockito; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Testing for lock up of WAL subsystem. @@ -73,7 +73,7 @@ import org.mockito.Mockito; */ @Category({MediumTests.class}) public class TestWALLockup { - private static final Log LOG = LogFactory.getLog(TestWALLockup.class); + private static final Logger LOG = LoggerFactory.getLogger(TestWALLockup.class); @Rule public TestName name = new TestName(); private static final String COLUMN_FAMILY = "MyCF"; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWALMonotonicallyIncreasingSeqId.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWALMonotonicallyIncreasingSeqId.java index b5378909b37..99db2087e85 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWALMonotonicallyIncreasingSeqId.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWALMonotonicallyIncreasingSeqId.java @@ -26,8 +26,6 @@ import java.util.Arrays; import java.util.List; import java.util.concurrent.CountDownLatch; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -61,6 +59,8 @@ import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import org.junit.runners.Parameterized.Parameter; import org.junit.runners.Parameterized.Parameters; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Test for HBASE-17471. 
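The hunks above and below all repeat one mechanical substitution: the commons-logging Log/LogFactory pair is dropped and the slf4j Logger/LoggerFactory pair is imported instead, with the class literal (or getClass(), as in the TestWALMonotonicallyIncreasingSeqId hunk that follows) passed through unchanged. A minimal sketch of the resulting declarations, using a hypothetical class name that is not part of this patch:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class ExampleMigratedTest { // hypothetical class, for illustration only
  // Static form used by most classes touched in this patch:
  private static final Logger LOG = LoggerFactory.getLogger(ExampleMigratedTest.class);

  // Instance form, equivalent to the getClass() variant seen in some tests:
  private final Logger instanceLog = LoggerFactory.getLogger(getClass());

  public void demo() {
    LOG.info("slf4j logger obtained the same way LogFactory.getLog() was used before");
  }
}

The factory call is a drop-in replacement, which is why most hunks in this section touch only the imports and the field declaration.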
@@ -76,7 +76,7 @@ import org.junit.runners.Parameterized.Parameters; @RunWith(Parameterized.class) @Category({ RegionServerTests.class, SmallTests.class }) public class TestWALMonotonicallyIncreasingSeqId { - private final Log LOG = LogFactory.getLog(getClass()); + private final Logger LOG = LoggerFactory.getLogger(getClass()); private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private static Path testDir = TEST_UTIL.getDataTestDir("TestWALMonotonicallyIncreasingSeqId"); private WALFactory wals; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWideScanner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWideScanner.java index 9514f9c564f..380de9e92e1 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWideScanner.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWideScanner.java @@ -26,8 +26,6 @@ import java.util.Iterator; import java.util.List; import java.util.Random; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HBaseTestCase; @@ -43,10 +41,12 @@ import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({RegionServerTests.class, SmallTests.class}) public class TestWideScanner extends HBaseTestCase { - private static final Log LOG = LogFactory.getLog(TestWideScanner.class); + private static final Logger LOG = LoggerFactory.getLogger(TestWideScanner.class); static final byte[] A = Bytes.toBytes("A"); static final byte[] B = Bytes.toBytes("B"); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/TestCompactionScanQueryMatcher.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/TestCompactionScanQueryMatcher.java index 3d5ed44e1c3..30ab007f694 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/TestCompactionScanQueryMatcher.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/TestCompactionScanQueryMatcher.java @@ -25,8 +25,6 @@ import java.io.IOException; import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeepDeletedCells; import org.apache.hadoop.hbase.KeyValue; @@ -41,11 +39,13 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({ RegionServerTests.class, SmallTests.class }) public class TestCompactionScanQueryMatcher extends AbstractTestScanQueryMatcher { - private static final Log LOG = LogFactory.getLog(TestCompactionScanQueryMatcher.class); + private static final Logger LOG = LoggerFactory.getLogger(TestCompactionScanQueryMatcher.class); @Test public void testMatch_PartialRangeDropDeletes() throws Exception { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/TestUserScanQueryMatcher.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/TestUserScanQueryMatcher.java index 4cde038b200..aec93cbb3a3 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/TestUserScanQueryMatcher.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/TestUserScanQueryMatcher.java @@ -24,8 +24,6 @@ import java.io.IOException; import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.PrivateCellUtil; @@ -38,11 +36,13 @@ import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({ RegionServerTests.class, SmallTests.class }) public class TestUserScanQueryMatcher extends AbstractTestScanQueryMatcher { - private static final Log LOG = LogFactory.getLog(TestUserScanQueryMatcher.class); + private static final Logger LOG = LoggerFactory.getLogger(TestUserScanQueryMatcher.class); /** * This is a cryptic test. It is checking that we don't include a fake cell, one that has a diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/throttle/TestCompactionWithThroughputController.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/throttle/TestCompactionWithThroughputController.java index fe33d860ab4..1d3834d870e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/throttle/TestCompactionWithThroughputController.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/throttle/TestCompactionWithThroughputController.java @@ -24,8 +24,6 @@ import java.io.IOException; import java.util.List; import java.util.concurrent.ThreadLocalRandom; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.MiniHBaseCluster; @@ -52,11 +50,13 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.JVMClusterUtil; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({ RegionServerTests.class, MediumTests.class }) public class TestCompactionWithThroughputController { - - private static final Log LOG = LogFactory.getLog(TestCompactionWithThroughputController.class); + private static final Logger LOG = + LoggerFactory.getLogger(TestCompactionWithThroughputController.class); private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/throttle/TestFlushWithThroughputController.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/throttle/TestFlushWithThroughputController.java index 3c1228e2f91..cb6155845cc 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/throttle/TestFlushWithThroughputController.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/throttle/TestFlushWithThroughputController.java @@ -18,8 +18,6 @@ import java.util.List; import java.util.Random; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; 
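Beyond the one-for-one swap shown in these throughput-controller tests, the slf4j Logger also accepts {} placeholders, which defer message formatting until the level is actually enabled. The patch itself leaves the existing string concatenation in the test bodies untouched, so the snippet below only illustrates what the new API additionally allows; the class and method names are hypothetical:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class PlaceholderExample { // hypothetical, not part of the patch
  private static final Logger LOG = LoggerFactory.getLogger(PlaceholderExample.class);

  void report(String table, long durationMs) {
    // The message is only built if INFO is enabled; no isInfoEnabled() guard needed.
    LOG.info("Flushed table {} in {} ms", table, durationMs);
  }
}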
-import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.MiniHBaseCluster; @@ -48,10 +46,13 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category(MediumTests.class) public class TestFlushWithThroughputController { - private static final Log LOG = LogFactory.getLog(TestFlushWithThroughputController.class); + private static final Logger LOG = + LoggerFactory.getLogger(TestFlushWithThroughputController.class); private static final double EPSILON = 1E-6; private HBaseTestingUtility hbtu; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestFSWAL.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestFSWAL.java index 9481018ace1..093a512a74f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestFSWAL.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestFSWAL.java @@ -33,8 +33,6 @@ import java.util.TreeMap; import java.util.UUID; import java.util.concurrent.atomic.AtomicBoolean; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -74,10 +72,12 @@ import org.junit.BeforeClass; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public abstract class AbstractTestFSWAL { - protected static final Log LOG = LogFactory.getLog(AbstractTestFSWAL.class); + protected static final Logger LOG = LoggerFactory.getLogger(AbstractTestFSWAL.class); protected static Configuration CONF; protected static FileSystem FS; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestLogRollPeriod.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestLogRollPeriod.java index 85fcaff6b62..84a62169490 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestLogRollPeriod.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestLogRollPeriod.java @@ -22,8 +22,6 @@ import static org.junit.Assert.assertFalse; import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.TableName; @@ -35,12 +33,14 @@ import org.apache.hadoop.hbase.wal.WAL; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Tests that verifies that the log is forced to be rolled every "hbase.regionserver.logroll.period" */ public abstract class AbstractTestLogRollPeriod { - private static final Log LOG = LogFactory.getLog(AbstractTestLogRollPeriod.class); + private static final Logger LOG = LoggerFactory.getLogger(AbstractTestLogRollPeriod.class); protected final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); @@ -110,7 +110,7 @@ public abstract class AbstractTestLogRollPeriod { Thread.sleep(LOG_ROLL_PERIOD / 16); } } catch (Exception e) { - LOG.warn(e); + 
LOG.warn(e.toString(), e); } } }; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestLogRolling.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestLogRolling.java index 68d71b0958d..a323db6ac47 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestLogRolling.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestLogRolling.java @@ -23,8 +23,6 @@ import static org.junit.Assert.assertTrue; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.hbase.HBaseTestingUtility; @@ -57,12 +55,14 @@ import org.junit.BeforeClass; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Test log deletion as logs are rolled. */ public abstract class AbstractTestLogRolling { - private static final Log LOG = LogFactory.getLog(AbstractTestLogRolling.class); + private static final Logger LOG = LoggerFactory.getLogger(AbstractTestLogRolling.class); protected HRegionServer server; protected String tableName; protected byte[] value; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestWALReplay.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestWALReplay.java index 60951aa068c..ededcf3cfab 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestWALReplay.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestWALReplay.java @@ -44,8 +44,6 @@ import java.util.TreeMap; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FileStatus; @@ -111,12 +109,14 @@ import org.junit.rules.TestName; import org.mockito.Mockito; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Test replay of edits out of a WAL split. 
*/ public abstract class AbstractTestWALReplay { - private static final Log LOG = LogFactory.getLog(AbstractTestWALReplay.class); + private static final Logger LOG = LoggerFactory.getLogger(AbstractTestWALReplay.class); static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private final EnvironmentEdge ee = EnvironmentEdgeManager.getDelegate(); private Path hbaseRootDir = null; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestFSHLog.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestFSHLog.java index 05addd2e44b..cb45f49ce1e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestFSHLog.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestFSHLog.java @@ -144,7 +144,7 @@ public class TestFSHLog extends AbstractTestFSWAL { try { holdAppend.await(); } catch (InterruptedException e) { - LOG.error(e); + LOG.error(e.toString(), e); } } } @@ -170,7 +170,7 @@ public class TestFSHLog extends AbstractTestFSWAL { region.put(new Put(b).addColumn(b, b,b)); putFinished.countDown(); } catch (IOException e) { - LOG.error(e); + LOG.error(e.toString(), e); } } }); @@ -187,7 +187,7 @@ public class TestFSHLog extends AbstractTestFSWAL { LOG.info("Flush succeeded:" + flushResult.isFlushSucceeded()); flushFinished.countDown(); } catch (IOException e) { - LOG.error(e); + LOG.error(e.toString(), e); } } }); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollAbort.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollAbort.java index 665ceeb9a92..6c2fd9f4fd1 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollAbort.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollAbort.java @@ -22,8 +22,6 @@ import java.io.IOException; import java.util.NavigableMap; import java.util.TreeMap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -39,6 +37,7 @@ import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Table; +import org.apache.hadoop.hbase.log.HBaseMarkers; import org.apache.hadoop.hbase.regionserver.HRegionServer; import org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl; import org.apache.hadoop.hbase.testclassification.MediumTests; @@ -58,6 +57,8 @@ import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Tests for conditions that should trigger RegionServer aborts when @@ -65,7 +66,7 @@ import org.junit.experimental.categories.Category; */ @Category({RegionServerTests.class, MediumTests.class}) public class TestLogRollAbort { - private static final Log LOG = LogFactory.getLog(AbstractTestLogRolling.class); + private static final Logger LOG = LoggerFactory.getLogger(AbstractTestLogRolling.class); private static MiniDFSCluster dfsCluster; private static Admin admin; private static MiniHBaseCluster cluster; @@ -164,7 +165,7 @@ public class TestLogRollAbort { // not reliable now that sync plays a roll in wall rolling. The above puts also now call // sync. 
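The AbstractTestLogRollPeriod and TestFSHLog hunks above change calls such as LOG.warn(e) and LOG.error(e) into LOG.warn(e.toString(), e) and LOG.error(e.toString(), e). The reason is that commons-logging accepted any Object as the message, while the slf4j methods take a String message plus an optional Throwable; the same constraint explains the other message rewrites in this section, such as LOG.debug(Objects.toString(nodes)) and LOG.info(Long.toString(i)). A small self-contained sketch of the pattern, with a hypothetical class name:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class ExceptionLoggingExample { // hypothetical class, for illustration only
  private static final Logger LOG = LoggerFactory.getLogger(ExceptionLoggingExample.class);

  void run(Runnable task) {
    try {
      task.run();
    } catch (RuntimeException e) {
      // commons-logging allowed LOG.error(e); slf4j needs a String message,
      // so the exception text becomes the message and the throwable keeps the stack trace.
      LOG.error(e.toString(), e);
    }
  }
}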
} catch (Throwable t) { - LOG.fatal("FAILED TEST: Got wrong exception", t); + LOG.error(HBaseMarkers.FATAL, "FAILED TEST: Got wrong exception", t); } } finally { table.close(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRolling.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRolling.java index dd4ca097a8c..12278eb3c05 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRolling.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRolling.java @@ -30,8 +30,6 @@ import java.util.List; import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.CategoryBasedTimeout; @@ -48,7 +46,6 @@ import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.fs.HFileSystem; import org.apache.hadoop.hbase.regionserver.HRegion; -import org.apache.hadoop.hbase.regionserver.Region; import org.apache.hadoop.hbase.testclassification.LargeTests; import org.apache.hadoop.hbase.testclassification.VerySlowRegionServerTests; import org.apache.hadoop.hbase.util.Bytes; @@ -64,11 +61,13 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestRule; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({ VerySlowRegionServerTests.class, LargeTests.class }) public class TestLogRolling extends AbstractTestLogRolling { - private static final Log LOG = LogFactory.getLog(TestLogRolling.class); + private static final Logger LOG = LoggerFactory.getLogger(TestLogRolling.class); @Rule public final TestRule timeout = CategoryBasedTimeout.builder().withTimeout(this.getClass()). withLookingForStuckThread(true).build(); @@ -355,7 +354,7 @@ public class TestLogRolling extends AbstractTestLogRolling { // a failed append could not be followed by a successful // sync. What is coming out here is a failed sync, a sync // that used to 'pass'. - LOG.info(e); + LOG.info(e.toString(), e); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollingNoCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollingNoCluster.java index c990680b283..054910056a4 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollingNoCluster.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollingNoCluster.java @@ -22,8 +22,7 @@ import java.io.IOException; import java.util.NavigableMap; import java.util.TreeMap; import java.util.concurrent.ThreadLocalRandom; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; + import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.CategoryBasedTimeout; @@ -47,6 +46,8 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestRule; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Test many concurrent appenders to an WAL while rolling the log. @@ -119,7 +120,7 @@ public class TestLogRollingNoCluster { * Appender thread. Appends to passed wal file. 
*/ static class Appender extends Thread { - private final Log log; + private final Logger log; private final WAL wal; private final int count; private Exception e = null; @@ -128,7 +129,7 @@ public class TestLogRollingNoCluster { super("" + index); this.wal = wal; this.count = count; - this.log = LogFactory.getLog("Appender:" + getName()); + this.log = LoggerFactory.getLogger("Appender:" + getName()); } /** diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestMasterReplication.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestMasterReplication.java index f44c2ea7b3e..3a5ca0e81a4 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestMasterReplication.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestMasterReplication.java @@ -31,8 +31,6 @@ import java.util.Optional; import java.util.Random; import java.util.concurrent.CountDownLatch; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -82,11 +80,13 @@ import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({ReplicationTests.class, LargeTests.class}) public class TestMasterReplication { - private static final Log LOG = LogFactory.getLog(TestReplicationBase.class); + private static final Logger LOG = LoggerFactory.getLogger(TestReplicationBase.class); private Configuration baseConfiguration; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestMultiSlaveReplication.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestMultiSlaveReplication.java index 871cd190bc1..9da0745f839 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestMultiSlaveReplication.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestMultiSlaveReplication.java @@ -30,8 +30,6 @@ import java.io.IOException; import java.util.Arrays; import java.util.concurrent.CountDownLatch; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.*; @@ -53,11 +51,13 @@ import org.apache.hadoop.hbase.zookeeper.ZKWatcher; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({ReplicationTests.class, LargeTests.class}) public class TestMultiSlaveReplication { - private static final Log LOG = LogFactory.getLog(TestReplicationBase.class); + private static final Logger LOG = LoggerFactory.getLogger(TestReplicationBase.class); private static Configuration conf1; private static Configuration conf2; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestNamespaceReplication.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestNamespaceReplication.java index 0d7a92d9774..ed711233a6a 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestNamespaceReplication.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestNamespaceReplication.java @@ -30,8 +30,6 @@ import java.util.List; import java.util.Map; import java.util.Set; -import org.apache.commons.logging.Log; -import 
org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HTableDescriptor; @@ -51,11 +49,13 @@ import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({MediumTests.class}) public class TestNamespaceReplication extends TestReplicationBase { - private static final Log LOG = LogFactory.getLog(TestNamespaceReplication.class); + private static final Logger LOG = LoggerFactory.getLogger(TestNamespaceReplication.class); private static String ns1 = "ns1"; private static String ns2 = "ns2"; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestPerTableCFReplication.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestPerTableCFReplication.java index e9c352d0585..bacda634e35 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestPerTableCFReplication.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestPerTableCFReplication.java @@ -25,8 +25,6 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseTestingUtility; @@ -57,13 +55,15 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.junit.Assert.*; @Category({FlakeyTests.class, LargeTests.class}) public class TestPerTableCFReplication { - private static final Log LOG = LogFactory.getLog(TestPerTableCFReplication.class); + private static final Logger LOG = LoggerFactory.getLogger(TestPerTableCFReplication.class); private static Configuration conf1; private static Configuration conf2; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationBase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationBase.java index f7d1009e6b9..0592e2d701d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationBase.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationBase.java @@ -23,8 +23,6 @@ import java.util.List; import java.util.NavigableMap; import java.util.TreeMap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseTestingUtility; @@ -45,6 +43,8 @@ import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.runners.Parameterized.Parameter; import org.junit.runners.Parameterized.Parameters; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This class is only a base for other integration-level replication tests. 
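slf4j has no fatal() method, so the TestLogRollAbort hunk above routes the old LOG.fatal(...) call through error() with the HBaseMarkers.FATAL marker that the patch imports from org.apache.hadoop.hbase.log. A standalone sketch of the same idea using only slf4j types; the local FATAL marker here is a stand-in for the shared HBaseMarkers constant, and the class name is hypothetical:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.Marker;
import org.slf4j.MarkerFactory;

public class FatalMarkerExample { // hypothetical class, for illustration only
  // Stand-in for org.apache.hadoop.hbase.log.HBaseMarkers.FATAL used by the patch.
  private static final Marker FATAL = MarkerFactory.getMarker("FATAL");
  private static final Logger LOG = LoggerFactory.getLogger(FatalMarkerExample.class);

  void fail(Throwable t) {
    // Replacement for the removed commons-logging LOG.fatal(msg, t):
    LOG.error(FATAL, "FAILED TEST: Got wrong exception", t);
  }
}

Backends that understand markers can then route or highlight these entries separately, which is what the FATAL level used to provide.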
@@ -58,7 +58,7 @@ public class TestReplicationBase { ((Log4JLogger) ReplicationSource.LOG).getLogger().setLevel(Level.ALL); }*/ - private static final Log LOG = LogFactory.getLog(TestReplicationBase.class); + private static final Logger LOG = LoggerFactory.getLogger(TestReplicationBase.class); protected static Configuration conf1 = HBaseConfiguration.create(); protected static Configuration conf2; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationChangingPeerRegionservers.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationChangingPeerRegionservers.java index 557ed99e402..abf8d261b6e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationChangingPeerRegionservers.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationChangingPeerRegionservers.java @@ -24,8 +24,6 @@ import static org.junit.Assert.fail; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.MiniHBaseCluster; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.Put; @@ -39,14 +37,16 @@ import org.apache.hadoop.hbase.util.JVMClusterUtil; import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Test handling of changes to the number of a peer's regionservers. */ @Category({ReplicationTests.class, LargeTests.class}) public class TestReplicationChangingPeerRegionservers extends TestReplicationBase { - - private static final Log LOG = LogFactory.getLog(TestReplicationChangingPeerRegionservers.class); + private static final Logger LOG = + LoggerFactory.getLogger(TestReplicationChangingPeerRegionservers.class); /** * @throws java.lang.Exception diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationDisableInactivePeer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationDisableInactivePeer.java index c9579d607da..1675496abad 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationDisableInactivePeer.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationDisableInactivePeer.java @@ -18,8 +18,6 @@ */ package org.apache.hadoop.hbase.replication; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; @@ -28,14 +26,16 @@ import org.apache.hadoop.hbase.testclassification.ReplicationTests; import org.apache.hadoop.hbase.util.Bytes; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.fail; @Category({ReplicationTests.class, LargeTests.class}) public class TestReplicationDisableInactivePeer extends TestReplicationBase { - - private static final Log LOG = LogFactory.getLog(TestReplicationDisableInactivePeer.class); + private static final Logger LOG = + LoggerFactory.getLogger(TestReplicationDisableInactivePeer.class); /** * Test disabling an inactive peer. 
Add a peer which is inactive, trying to diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationDroppedTables.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationDroppedTables.java index df9cff25ece..044e55d7ef0 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationDroppedTables.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationDroppedTables.java @@ -18,8 +18,6 @@ */ package org.apache.hadoop.hbase.replication; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.testclassification.LargeTests; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HConstants; @@ -40,6 +38,8 @@ import org.apache.hadoop.hbase.util.JVMClusterUtil; import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.fail; @@ -49,7 +49,7 @@ import java.util.List; @Category(LargeTests.class) public class TestReplicationDroppedTables extends TestReplicationBase { - private static final Log LOG = LogFactory.getLog(TestReplicationDroppedTables.class); + private static final Logger LOG = LoggerFactory.getLogger(TestReplicationDroppedTables.class); /** * @throws java.lang.Exception diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.java index b76ebb1a2c0..c5f83419bff 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.java @@ -30,8 +30,6 @@ import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.Waiter; import org.apache.hadoop.hbase.client.Connection; @@ -60,13 +58,15 @@ import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Tests ReplicationSource and ReplicationEndpoint interactions */ @Category({ReplicationTests.class, MediumTests.class}) public class TestReplicationEndpoint extends TestReplicationBase { - private static final Log LOG = LogFactory.getLog(TestReplicationEndpoint.class); + private static final Logger LOG = LoggerFactory.getLogger(TestReplicationEndpoint.class); static int numRegionServers; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationKillRS.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationKillRS.java index 5739aee16af..30cd8602909 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationKillRS.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationKillRS.java @@ -18,8 +18,6 @@ */ package org.apache.hadoop.hbase.replication; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HBaseTestingUtility; import 
org.apache.hadoop.hbase.UnknownScannerException; import org.apache.hadoop.hbase.client.Result; @@ -28,13 +26,15 @@ import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.testclassification.LargeTests; import org.apache.hadoop.hbase.testclassification.ReplicationTests; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.junit.Assert.fail; @Category({ReplicationTests.class, LargeTests.class}) public class TestReplicationKillRS extends TestReplicationBase { - private static final Log LOG = LogFactory.getLog(TestReplicationKillRS.class); + private static final Logger LOG = LoggerFactory.getLogger(TestReplicationKillRS.class); /** * Load up 1 tables over 2 region servers and kill a source during diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSource.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSource.java index 122860532c8..48d8924377b 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSource.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSource.java @@ -29,8 +29,6 @@ import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.atomic.AtomicLong; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -61,13 +59,15 @@ import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; import org.mockito.Mockito; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({ReplicationTests.class, MediumTests.class}) public class TestReplicationSource { - private static final Log LOG = - LogFactory.getLog(TestReplicationSource.class); + private static final Logger LOG = + LoggerFactory.getLogger(TestReplicationSource.class); private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private final static HBaseTestingUtility TEST_UTIL_PEER = diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationStateBasic.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationStateBasic.java index 15d15b3dafd..29c093000e9 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationStateBasic.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationStateBasic.java @@ -23,8 +23,6 @@ import static org.junit.Assert.*; import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.util.Pair; @@ -32,6 +30,8 @@ import org.apache.hadoop.hbase.zookeeper.ZKConfig; import org.apache.zookeeper.KeeperException; import org.junit.Before; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * White box testing for replication state interfaces. 
Implementations should extend this class, and @@ -60,7 +60,7 @@ public abstract class TestReplicationStateBasic { protected static final int ZK_MAX_COUNT = 300; protected static final int ZK_SLEEP_INTERVAL = 100; // millis - private static final Log LOG = LogFactory.getLog(TestReplicationStateBasic.class); + private static final Logger LOG = LoggerFactory.getLogger(TestReplicationStateBasic.class); @Before public void setUp() { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationStateZKImpl.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationStateZKImpl.java index 15fc78f3fde..231d655e2c2 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationStateZKImpl.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationStateZKImpl.java @@ -23,8 +23,6 @@ import static org.junit.Assert.fail; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.hbase.ChoreService; @@ -51,11 +49,13 @@ import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({ReplicationTests.class, MediumTests.class}) public class TestReplicationStateZKImpl extends TestReplicationStateBasic { - private static final Log LOG = LogFactory.getLog(TestReplicationStateZKImpl.class); + private static final Logger LOG = LoggerFactory.getLogger(TestReplicationStateZKImpl.class); private static Configuration conf; private static HBaseTestingUtility utility; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationStatus.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationStatus.java index 7a2377fb9de..8532dff0862 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationStatus.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationStatus.java @@ -23,8 +23,6 @@ import static org.junit.Assert.assertTrue; import java.util.EnumSet; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.ClusterStatus; import org.apache.hadoop.hbase.ServerLoad; import org.apache.hadoop.hbase.ServerName; @@ -37,10 +35,12 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.JVMClusterUtil; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({ReplicationTests.class, MediumTests.class}) public class TestReplicationStatus extends TestReplicationBase { - private static final Log LOG = LogFactory.getLog(TestReplicationStatus.class); + private static final Logger LOG = LoggerFactory.getLogger(TestReplicationStatus.class); private static final String PEER_ID = "2"; /** diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSyncUpTool.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSyncUpTool.java index edfff9a90ea..0a602ada736 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSyncUpTool.java +++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSyncUpTool.java @@ -17,8 +17,6 @@ */ package org.apache.hadoop.hbase.replication; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HConstants; @@ -38,6 +36,8 @@ import org.apache.hadoop.hbase.util.Bytes; import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.util.ArrayList; import java.util.List; @@ -47,7 +47,7 @@ import static org.junit.Assert.assertEquals; @Category({ReplicationTests.class, LargeTests.class}) public class TestReplicationSyncUpTool extends TestReplicationBase { - private static final Log LOG = LogFactory.getLog(TestReplicationSyncUpTool.class); + private static final Logger LOG = LoggerFactory.getLogger(TestReplicationSyncUpTool.class); private static final TableName t1_su = TableName.valueOf("t1_syncup"); private static final TableName t2_su = TableName.valueOf("t2_syncup"); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationTrackerZKImpl.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationTrackerZKImpl.java index c2481c4c11d..a04d5247d80 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationTrackerZKImpl.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationTrackerZKImpl.java @@ -27,8 +27,6 @@ import java.util.ArrayList; import java.util.List; import java.util.concurrent.atomic.AtomicInteger; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.hbase.ChoreService; @@ -52,6 +50,8 @@ import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This class tests the ReplicationTrackerZKImpl class and ReplicationListener interface. 
One @@ -63,7 +63,7 @@ import org.junit.experimental.categories.Category; @Category({ReplicationTests.class, MediumTests.class}) public class TestReplicationTrackerZKImpl { - private static final Log LOG = LogFactory.getLog(TestReplicationTrackerZKImpl.class); + private static final Logger LOG = LoggerFactory.getLogger(TestReplicationTrackerZKImpl.class); private static Configuration conf; private static HBaseTestingUtility utility; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWithTags.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWithTags.java index 924a72cd774..e3c1959ac42 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWithTags.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWithTags.java @@ -26,8 +26,6 @@ import java.util.ArrayList; import java.util.List; import java.util.Optional; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.Cell; @@ -66,11 +64,13 @@ import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({ReplicationTests.class, LargeTests.class}) public class TestReplicationWithTags { - private static final Log LOG = LogFactory.getLog(TestReplicationWithTags.class); + private static final Logger LOG = LoggerFactory.getLogger(TestReplicationWithTags.class); private static final byte TAG_TYPE = 1; private static Configuration conf1 = HBaseConfiguration.create(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestSerialReplication.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestSerialReplication.java index 1c5aa7190ea..3185a5be32e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestSerialReplication.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestSerialReplication.java @@ -26,8 +26,6 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.HBaseConfiguration; @@ -61,10 +59,12 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({ ReplicationTests.class, LargeTests.class }) public class TestSerialReplication { - private static final Log LOG = LogFactory.getLog(TestSerialReplication.class); + private static final Logger LOG = LoggerFactory.getLogger(TestSerialReplication.class); private static Configuration conf1; private static Configuration conf2; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/master/TestTableCFsUpdater.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/master/TestTableCFsUpdater.java index e78abfb74b4..2993043b5b5 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/master/TestTableCFsUpdater.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/master/TestTableCFsUpdater.java @@ -20,8 +20,6 @@ package 
org.apache.hadoop.hbase.replication.master; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Abortable; import org.apache.hadoop.hbase.HBaseTestingUtility; @@ -41,6 +39,8 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.util.List; import java.util.Map; @@ -52,7 +52,7 @@ import static org.junit.Assert.assertTrue; @Category({ReplicationTests.class, SmallTests.class}) public class TestTableCFsUpdater extends ReplicationPeerConfigUpgrader { - private static final Log LOG = LogFactory.getLog(TestTableCFsUpdater.class); + private static final Logger LOG = LoggerFactory.getLogger(TestTableCFsUpdater.class); private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private static ZKWatcher zkw = null; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestGlobalThrottler.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestGlobalThrottler.java index 9b1648f6319..fab125b8bcc 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestGlobalThrottler.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestGlobalThrottler.java @@ -24,8 +24,6 @@ import java.util.ArrayList; import java.util.List; import java.util.concurrent.atomic.AtomicLong; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.HBaseConfiguration; @@ -56,10 +54,12 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({ ReplicationTests.class, LargeTests.class }) public class TestGlobalThrottler { - private static final Log LOG = LogFactory.getLog(TestGlobalThrottler.class); + private static final Logger LOG = LoggerFactory.getLogger(TestGlobalThrottler.class); private static Configuration conf1; private static Configuration conf2; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestRegionReplicaReplicationEndpoint.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestRegionReplicaReplicationEndpoint.java index 7e0f09024b7..41272efbb7b 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestRegionReplicaReplicationEndpoint.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestRegionReplicaReplicationEndpoint.java @@ -26,9 +26,6 @@ import java.util.List; import java.util.concurrent.Executors; import java.util.concurrent.atomic.AtomicLong; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.commons.logging.impl.Log4JLogger; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; @@ -41,7 +38,6 @@ import org.apache.hadoop.hbase.client.ClusterConnection; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; import org.apache.hadoop.hbase.client.RegionLocator; -import 
org.apache.hadoop.hbase.client.RpcRetryingCallerImpl; import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.client.replication.ReplicationAdmin; import org.apache.hadoop.hbase.regionserver.HRegion; @@ -56,7 +52,6 @@ import org.apache.hadoop.hbase.testclassification.FlakeyTests; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil; import org.apache.hadoop.hbase.zookeeper.ZKConfig; -import org.apache.log4j.Level; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Rule; @@ -65,6 +60,8 @@ import org.junit.experimental.categories.Category; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Tests RegionReplicaReplicationEndpoint class by setting up region replicas and verifying @@ -72,12 +69,8 @@ import org.junit.rules.TestName; */ @Category({FlakeyTests.class, MediumTests.class}) public class TestRegionReplicaReplicationEndpoint { - - private static final Log LOG = LogFactory.getLog(TestRegionReplicaReplicationEndpoint.class); - - static { - ((Log4JLogger) RpcRetryingCallerImpl.LOG).getLogger().setLevel(Level.ALL); - } + private static final Logger LOG = + LoggerFactory.getLogger(TestRegionReplicaReplicationEndpoint.class); private static final int NB_SERVERS = 2; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSink.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSink.java index dc0ca084229..039b667c40c 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSink.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSink.java @@ -33,8 +33,6 @@ import java.util.Random; import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileUtil; @@ -71,10 +69,12 @@ import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({ReplicationTests.class, MediumTests.class}) public class TestReplicationSink { - private static final Log LOG = LogFactory.getLog(TestReplicationSink.class); + private static final Logger LOG = LoggerFactory.getLogger(TestReplicationSink.class); private static final int BATCH_SIZE = 10; protected final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSourceManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSourceManager.java index c4d079d892e..307ea7f42d6 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSourceManager.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSourceManager.java @@ -40,8 +40,7 @@ import java.util.TreeSet; import java.util.UUID; import java.util.concurrent.CountDownLatch; import java.util.stream.Collectors; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; + import 
org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -94,7 +93,8 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; @@ -108,8 +108,8 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescr @Category({ReplicationTests.class, MediumTests.class}) public abstract class TestReplicationSourceManager { - protected static final Log LOG = - LogFactory.getLog(TestReplicationSourceManager.class); + protected static final Logger LOG = + LoggerFactory.getLogger(TestReplicationSourceManager.class); protected static Configuration conf; @@ -274,7 +274,7 @@ public abstract class TestReplicationSourceManager { if(i > 1 && i % 20 == 0) { wal.rollWriter(); } - LOG.info(i); + LOG.info(Long.toString(i)); final long txid = wal.append( hri, new WALKeyImpl(hri.getEncodedNameAsBytes(), test, System.currentTimeMillis(), mvcc, scopes), diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationThrottler.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationThrottler.java index 8e2618c1bde..eb6b9cf2874 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationThrottler.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationThrottler.java @@ -21,18 +21,18 @@ package org.apache.hadoop.hbase.replication.regionserver; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.testclassification.ReplicationTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({ReplicationTests.class, SmallTests.class}) public class TestReplicationThrottler { - private static final Log LOG = LogFactory.getLog(TestReplicationThrottler.class); + private static final Logger LOG = LoggerFactory.getLogger(TestReplicationThrottler.class); /** * unit test for throttling diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicator.java index 4a460740059..40b76d4c4dc 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicator.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicator.java @@ -24,8 +24,6 @@ import java.util.List; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.TableName; @@ -51,13 +49,15 @@ import org.junit.BeforeClass; import org.junit.Test; import org.junit.Ignore; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; 
+import org.slf4j.LoggerFactory; @Category(MediumTests.class) @Ignore("Flaky, needs to be rewritten, see HBASE-19125") public class TestReplicator extends TestReplicationBase { - static final Log LOG = LogFactory.getLog(TestReplicator.class); + static final Logger LOG = LoggerFactory.getLogger(TestReplicator.class); static final int NUM_ROWS = 10; @BeforeClass diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/HBaseKerberosUtils.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/HBaseKerberosUtils.java index 4d402b7310d..655d9ba4f0c 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/HBaseKerberosUtils.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/HBaseKerberosUtils.java @@ -17,13 +17,12 @@ */ package org.apache.hadoop.hbase.security; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeys; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.base.Strings; import org.apache.hadoop.security.UserGroupInformation; @@ -32,7 +31,7 @@ import java.net.InetAddress; @InterfaceAudience.Private public class HBaseKerberosUtils { - private static final Log LOG = LogFactory.getLog(HBaseKerberosUtils.class); + private static final Logger LOG = LoggerFactory.getLogger(HBaseKerberosUtils.class); public static final String KRB_PRINCIPAL = "hbase.regionserver.kerberos.principal"; public static final String MASTER_KRB_PRINCIPAL = "hbase.master.kerberos.principal"; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestUser.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestUser.java index 0af7c43190d..5986e0b8260 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestUser.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestUser.java @@ -23,8 +23,6 @@ import java.security.PrivilegedAction; import java.security.PrivilegedExceptionAction; import org.apache.commons.lang3.SystemUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeys; import org.apache.hadoop.hbase.HBaseConfiguration; @@ -33,14 +31,15 @@ import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.security.UserGroupInformation; import org.junit.Test; import org.junit.experimental.categories.Category; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableSet; import static org.junit.Assert.*; @Category({SecurityTests.class, SmallTests.class}) public class TestUser { - private static final Log LOG = LogFactory.getLog(TestUser.class); + private static final Logger LOG = LoggerFactory.getLogger(TestUser.class); @Test public void testCreateUserForTestingGroupCache() throws Exception { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java index e477026bf30..cc01d62c88d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java +++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java @@ -30,8 +30,7 @@ import java.util.concurrent.CountDownLatch; import com.google.protobuf.BlockingRpcChannel; import com.google.protobuf.ServiceException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; + import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Coprocessor; import org.apache.hadoop.hbase.HBaseTestingUtility; @@ -67,6 +66,8 @@ import org.apache.hadoop.hbase.security.access.Permission.Action; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps; import org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.junit.Assert.assertEquals; import static org.junit.Assert.fail; @@ -76,7 +77,7 @@ import static org.junit.Assert.fail; */ public class SecureTestUtil { - private static final Log LOG = LogFactory.getLog(SecureTestUtil.class); + private static final Logger LOG = LoggerFactory.getLogger(SecureTestUtil.class); private static final int WAIT_TIME = 10000; public static void configureSuperuser(Configuration conf) throws IOException { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java index 67914658df1..138a40e3111 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java @@ -36,8 +36,7 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; + import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -128,14 +127,14 @@ import org.apache.hadoop.hbase.tool.LoadIncrementalHFiles; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.JVMClusterUtil; import org.apache.hadoop.hbase.util.Threads; -import org.apache.log4j.Level; -import org.apache.log4j.Logger; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Performs authorization checks for common operations, according to different @@ -143,14 +142,7 @@ import org.junit.rules.TestName; */ @Category({SecurityTests.class, LargeTests.class}) public class TestAccessController extends SecureTestUtil { - private static final Log LOG = LogFactory.getLog(TestAccessController.class); - - static { - Logger.getLogger(AccessController.class).setLevel(Level.TRACE); - Logger.getLogger(AccessControlFilter.class).setLevel(Level.TRACE); - Logger.getLogger(TableAuthManager.class).setLevel(Level.TRACE); - } - + private static final Logger LOG = LoggerFactory.getLogger(TestAccessController.class); private static TableName TEST_TABLE = TableName.valueOf("testtable1"); private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private static Configuration conf; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController2.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController2.java index f9d0b1c219f..7b7a150fefd 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController2.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController2.java @@ -26,8 +26,6 @@ import static org.junit.Assert.assertTrue; import java.util.Arrays; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Coprocessor; import org.apache.hadoop.hbase.HBaseTestingUtility; @@ -64,10 +62,12 @@ import org.junit.BeforeClass; import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({SecurityTests.class, LargeTests.class}) public class TestAccessController2 extends SecureTestUtil { - private static final Log LOG = LogFactory.getLog(TestAccessController2.class); + private static final Logger LOG = LoggerFactory.getLogger(TestAccessController2.class); private static final byte[] TEST_ROW = Bytes.toBytes("test"); private static final byte[] TEST_FAMILY = Bytes.toBytes("f"); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController3.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController3.java index 11dbbd3469d..babba153bf9 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController3.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController3.java @@ -21,8 +21,6 @@ import static org.apache.hadoop.hbase.AuthUtil.toGroupEntry; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Coprocessor; import org.apache.hadoop.hbase.CoprocessorEnvironment; @@ -47,14 +45,14 @@ import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.testclassification.SecurityTests; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.JVMClusterUtil; -import org.apache.log4j.Level; -import org.apache.log4j.Logger; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Performs checks for reference counting w.r.t. 
TableAuthManager which is used by @@ -65,14 +63,7 @@ import org.junit.rules.TestName; */ @Category({SecurityTests.class, MediumTests.class}) public class TestAccessController3 extends SecureTestUtil { - private static final Log LOG = LogFactory.getLog(TestAccessController.class); - - static { - Logger.getLogger(AccessController.class).setLevel(Level.TRACE); - Logger.getLogger(AccessControlFilter.class).setLevel(Level.TRACE); - Logger.getLogger(TableAuthManager.class).setLevel(Level.TRACE); - } - + private static final Logger LOG = LoggerFactory.getLogger(TestAccessController.class); private static TableName TEST_TABLE = TableName.valueOf("testtable1"); private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private static Configuration conf; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLWithMultipleVersions.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLWithMultipleVersions.java index 793f75b3fe9..c9a1e6f1382 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLWithMultipleVersions.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLWithMultipleVersions.java @@ -25,8 +25,6 @@ import java.security.PrivilegedExceptionAction; import java.util.HashMap; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.AuthUtil; import org.apache.hadoop.hbase.Coprocessor; @@ -34,6 +32,7 @@ import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.TableNotFoundException; +import org.apache.hadoop.hbase.TestTableName; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; @@ -50,10 +49,7 @@ import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.testclassification.SecurityTests; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; -import org.apache.hadoop.hbase.TestTableName; import org.apache.hadoop.hbase.util.Threads; -import org.apache.log4j.Level; -import org.apache.log4j.Logger; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; @@ -61,16 +57,12 @@ import org.junit.BeforeClass; import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({SecurityTests.class, MediumTests.class}) public class TestCellACLWithMultipleVersions extends SecureTestUtil { - private static final Log LOG = LogFactory.getLog(TestCellACLWithMultipleVersions.class); - - static { - Logger.getLogger(AccessController.class).setLevel(Level.TRACE); - Logger.getLogger(AccessControlFilter.class).setLevel(Level.TRACE); - Logger.getLogger(TableAuthManager.class).setLevel(Level.TRACE); - } + private static final Logger LOG = LoggerFactory.getLogger(TestCellACLWithMultipleVersions.class); @Rule public TestTableName TEST_TABLE = new TestTableName(); @@ -569,36 +561,36 @@ public class TestCellACLWithMultipleVersions extends SecureTestUtil { new String[] { user2.getShortName(), AuthUtil.toGroupEntry(GROUP), USER_OWNER.getShortName() }, Action.READ, Action.WRITE); Put p = new Put(TEST_ROW1); - 
p.addColumn(TEST_FAMILY1, TEST_Q1, (long) 123, ZERO); + p.addColumn(TEST_FAMILY1, TEST_Q1, 123, ZERO); p.setACL(permsU1andOwner); t.put(p); p = new Put(TEST_ROW1); - p.addColumn(TEST_FAMILY1, TEST_Q2, (long) 123, ZERO); + p.addColumn(TEST_FAMILY1, TEST_Q2, 123, ZERO); p.setACL(permsU2andGUandOwner); t.put(p); p = new Put(TEST_ROW1); - p.addColumn(TEST_FAMILY2, TEST_Q1, (long) 123, ZERO); - p.addColumn(TEST_FAMILY2, TEST_Q2, (long) 123, ZERO); + p.addColumn(TEST_FAMILY2, TEST_Q1, 123, ZERO); + p.addColumn(TEST_FAMILY2, TEST_Q2, 123, ZERO); p.setACL(permsU2andGUandOwner); t.put(p); p = new Put(TEST_ROW1); - p.addColumn(TEST_FAMILY2, TEST_Q1, (long) 125, ZERO); - p.addColumn(TEST_FAMILY2, TEST_Q2, (long) 125, ZERO); + p.addColumn(TEST_FAMILY2, TEST_Q1, 125, ZERO); + p.addColumn(TEST_FAMILY2, TEST_Q2, 125, ZERO); p.setACL(permsU1andOwner); t.put(p); p = new Put(TEST_ROW1); - p.addColumn(TEST_FAMILY1, TEST_Q1, (long) 127, ZERO); + p.addColumn(TEST_FAMILY1, TEST_Q1, 127, ZERO); p.setACL(permsU2andGUandOwner); t.put(p); p = new Put(TEST_ROW1); - p.addColumn(TEST_FAMILY1, TEST_Q2, (long) 127, ZERO); + p.addColumn(TEST_FAMILY1, TEST_Q2, 127, ZERO); p.setACL(permsU1andOwner); t.put(p); p = new Put(TEST_ROW1); - p.addColumn(TEST_FAMILY2, TEST_Q1, (long) 129, ZERO); - p.addColumn(TEST_FAMILY2, TEST_Q2, (long) 129, ZERO); + p.addColumn(TEST_FAMILY2, TEST_Q1, 129, ZERO); + p.addColumn(TEST_FAMILY2, TEST_Q2, 129, ZERO); p.setACL(permsU1andOwner); t.put(p); } @@ -675,20 +667,20 @@ public class TestCellACLWithMultipleVersions extends SecureTestUtil { new String[] { user2.getShortName(), AuthUtil.toGroupEntry(GROUP), USER_OWNER.getShortName() }, Action.READ, Action.WRITE); Put p = new Put(TEST_ROW1); - p.addColumn(TEST_FAMILY1, TEST_Q1, (long) 123, ZERO); + p.addColumn(TEST_FAMILY1, TEST_Q1, 123, ZERO); p.setACL(permsU1andOwner); t.put(p); p = new Put(TEST_ROW1); - p.addColumn(TEST_FAMILY1, TEST_Q2, (long) 123, ZERO); + p.addColumn(TEST_FAMILY1, TEST_Q2, 123, ZERO); p.setACL(permsU2andGUandOwner); t.put(p); p = new Put(TEST_ROW1); - p.addColumn(TEST_FAMILY1, TEST_Q1, (long) 127, ZERO); + p.addColumn(TEST_FAMILY1, TEST_Q1, 127, ZERO); p.setACL(permsU2andGUandOwner); t.put(p); p = new Put(TEST_ROW1); - p.addColumn(TEST_FAMILY1, TEST_Q2, (long) 127, ZERO); + p.addColumn(TEST_FAMILY1, TEST_Q2, 127, ZERO); p.setACL(permsU1andOwner); t.put(p); } @@ -767,20 +759,20 @@ public class TestCellACLWithMultipleVersions extends SecureTestUtil { permsU2andGUandOwner.put(USER_OWNER.getShortName(), new Permission(Permission.Action.READ, Permission.Action.WRITE)); Put p = new Put(TEST_ROW1); - p.addColumn(TEST_FAMILY1, TEST_Q1, (long) 123, ZERO); + p.addColumn(TEST_FAMILY1, TEST_Q1, 123, ZERO); p.setACL(permsU1andOwner); t.put(p); p = new Put(TEST_ROW1); - p.addColumn(TEST_FAMILY1, TEST_Q2, (long) 123, ZERO); + p.addColumn(TEST_FAMILY1, TEST_Q2, 123, ZERO); p.setACL(permsU2andGUandOwner); t.put(p); p = new Put(TEST_ROW1); - p.addColumn(TEST_FAMILY1, TEST_Q1, (long) 127, ZERO); + p.addColumn(TEST_FAMILY1, TEST_Q1, 127, ZERO); p.setACL(permsU2andGUandOwner); t.put(p); p = new Put(TEST_ROW1); - p.addColumn(TEST_FAMILY1, TEST_Q2, (long) 127, ZERO); + p.addColumn(TEST_FAMILY1, TEST_Q2, 127, ZERO); p.setACL(permsU1andOwner); t.put(p); } @@ -798,7 +790,7 @@ public class TestCellACLWithMultipleVersions extends SecureTestUtil { try (Connection connection = ConnectionFactory.createConnection(conf)) { try (Table t = connection.getTable(TEST_TABLE.getTableName())) { Put p = new Put(TEST_ROW1); - p.addColumn(TEST_FAMILY1, TEST_Q1, (long) 
125, ZERO); + p.addColumn(TEST_FAMILY1, TEST_Q1, 125, ZERO); p.addColumn(TEST_FAMILY1, TEST_Q2, ZERO); p.setACL(user2.getShortName(), new Permission(Permission.Action.READ, Permission.Action.WRITE)); @@ -863,26 +855,26 @@ public class TestCellACLWithMultipleVersions extends SecureTestUtil { AuthUtil.toGroupEntry(GROUP) }, Action.READ, Action.WRITE); Put p = new Put(TEST_ROW1); - p.addColumn(TEST_FAMILY1, TEST_Q1, (long) 120, ZERO); - p.addColumn(TEST_FAMILY1, TEST_Q2, (long) 120, ZERO); - p.addColumn(TEST_FAMILY1, TEST_Q3, (long) 120, ZERO); + p.addColumn(TEST_FAMILY1, TEST_Q1, 120, ZERO); + p.addColumn(TEST_FAMILY1, TEST_Q2, 120, ZERO); + p.addColumn(TEST_FAMILY1, TEST_Q3, 120, ZERO); p.setACL(permsU1andU2andGUandOwner); t.put(p); p = new Put(TEST_ROW1); - p.addColumn(TEST_FAMILY1, TEST_Q1, (long) 123, ZERO); - p.addColumn(TEST_FAMILY1, TEST_Q2, (long) 123, ZERO); - p.addColumn(TEST_FAMILY1, TEST_Q3, (long) 123, ZERO); + p.addColumn(TEST_FAMILY1, TEST_Q1, 123, ZERO); + p.addColumn(TEST_FAMILY1, TEST_Q2, 123, ZERO); + p.addColumn(TEST_FAMILY1, TEST_Q3, 123, ZERO); p.setACL(permsU1andOwner); t.put(p); p = new Put(TEST_ROW1); - p.addColumn(TEST_FAMILY1, TEST_Q1, (long) 127, ZERO); + p.addColumn(TEST_FAMILY1, TEST_Q1, 127, ZERO); p.setACL(permsU1_U2andGU); t.put(p); p = new Put(TEST_ROW1); - p.addColumn(TEST_FAMILY1, TEST_Q2, (long) 127, ZERO); + p.addColumn(TEST_FAMILY1, TEST_Q2, 127, ZERO); p.setACL(user2.getShortName(), new Permission(Permission.Action.READ)); t.put(p); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLs.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLs.java index 3aa97b74977..319c75a5f43 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLs.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLs.java @@ -23,8 +23,6 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.AuthUtil; import org.apache.hadoop.hbase.Cell; @@ -34,6 +32,7 @@ import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.TableNotFoundException; +import org.apache.hadoop.hbase.TestTableName; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; @@ -52,10 +51,7 @@ import org.apache.hadoop.hbase.security.access.Permission.Action; import org.apache.hadoop.hbase.testclassification.LargeTests; import org.apache.hadoop.hbase.testclassification.SecurityTests; import org.apache.hadoop.hbase.util.Bytes; -import org.apache.hadoop.hbase.TestTableName; import org.apache.hadoop.hbase.util.Threads; -import org.apache.log4j.Level; -import org.apache.log4j.Logger; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; @@ -63,18 +59,14 @@ import org.junit.BeforeClass; import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; @Category({SecurityTests.class, LargeTests.class}) public class TestCellACLs extends SecureTestUtil { - private static final Log LOG = LogFactory.getLog(TestCellACLs.class); 
- - static { - Logger.getLogger(AccessController.class).setLevel(Level.TRACE); - Logger.getLogger(AccessControlFilter.class).setLevel(Level.TRACE); - Logger.getLogger(TableAuthManager.class).setLevel(Level.TRACE); - } + private static final Logger LOG = LoggerFactory.getLogger(TestCellACLs.class); @Rule public TestTableName TEST_TABLE = new TestTableName(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCoprocessorWhitelistMasterObserver.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCoprocessorWhitelistMasterObserver.java index ac13c8b5153..a357c1fc4c7 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCoprocessorWhitelistMasterObserver.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCoprocessorWhitelistMasterObserver.java @@ -23,8 +23,6 @@ import static org.junit.Assert.fail; import java.io.IOException; import java.util.Optional; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.CategoryBasedTimeout; @@ -50,13 +48,16 @@ import org.junit.ClassRule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestRule; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Performs coprocessor loads for variuos paths and malformed strings */ @Category({SecurityTests.class, MediumTests.class}) public class TestCoprocessorWhitelistMasterObserver extends SecureTestUtil { - private static final Log LOG = LogFactory.getLog(TestCoprocessorWhitelistMasterObserver.class); + private static final Logger LOG = + LoggerFactory.getLogger(TestCoprocessorWhitelistMasterObserver.class); private static final HBaseTestingUtility UTIL = new HBaseTestingUtility(); private static final TableName TEST_TABLE = TableName.valueOf("testTable"); private static final byte[] TEST_FAMILY = Bytes.toBytes("fam1"); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestNamespaceCommands.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestNamespaceCommands.java index 2b0091d658d..7db5fe00d44 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestNamespaceCommands.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestNamespaceCommands.java @@ -24,9 +24,8 @@ import static org.junit.Assert.assertTrue; import java.util.Arrays; import java.util.List; import java.util.Map; +import java.util.Objects; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HColumnDescriptor; @@ -53,14 +52,15 @@ import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimap; import com.google.protobuf.BlockingRpcChannel; @Category({SecurityTests.class, MediumTests.class}) public class TestNamespaceCommands extends SecureTestUtil { private static HBaseTestingUtility UTIL = new HBaseTestingUtility(); - private static final Log LOG = LogFactory.getLog(TestNamespaceCommands.class); + private static final Logger LOG = 
LoggerFactory.getLogger(TestNamespaceCommands.class); private static String TEST_NAMESPACE = "ns1"; private static String TEST_NAMESPACE2 = "ns2"; private static Configuration conf; @@ -204,7 +204,7 @@ public class TestNamespaceCommands extends SecureTestUtil { perms = AccessControlLists.getNamespacePermissions(conf, TEST_NAMESPACE); for (Map.Entry entry : perms.entries()) { - LOG.debug(entry); + LOG.debug(Objects.toString(entry)); } assertEquals(6, perms.size()); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestScanEarlyTermination.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestScanEarlyTermination.java index f60209fff7f..fcf989b67a5 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestScanEarlyTermination.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestScanEarlyTermination.java @@ -17,12 +17,12 @@ */ package org.apache.hadoop.hbase.security.access; -import static org.junit.Assert.*; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; import java.util.UUID; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Coprocessor; import org.apache.hadoop.hbase.HBaseTestingUtility; @@ -30,6 +30,7 @@ import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.TableNotFoundException; +import org.apache.hadoop.hbase.TestTableName; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; @@ -44,9 +45,6 @@ import org.apache.hadoop.hbase.security.access.Permission.Action; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.testclassification.SecurityTests; import org.apache.hadoop.hbase.util.Bytes; -import org.apache.hadoop.hbase.TestTableName; -import org.apache.log4j.Level; -import org.apache.log4j.Logger; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; @@ -54,16 +52,12 @@ import org.junit.BeforeClass; import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({SecurityTests.class, MediumTests.class}) public class TestScanEarlyTermination extends SecureTestUtil { - private static final Log LOG = LogFactory.getLog(TestScanEarlyTermination.class); - - static { - Logger.getLogger(AccessController.class).setLevel(Level.TRACE); - Logger.getLogger(AccessControlFilter.class).setLevel(Level.TRACE); - Logger.getLogger(TableAuthManager.class).setLevel(Level.TRACE); - } + private static final Logger LOG = LoggerFactory.getLogger(TestScanEarlyTermination.class); @Rule public TestTableName TEST_TABLE = new TestTableName(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestTablePermissions.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestTablePermissions.java index 607ea8c5f22..1270e8a374e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestTablePermissions.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestTablePermissions.java @@ -29,8 +29,6 @@ import java.util.List; import java.util.Map; 
import java.util.concurrent.atomic.AtomicBoolean; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Abortable; import org.apache.hadoop.hbase.TableName; @@ -50,7 +48,8 @@ import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap; import org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimap; @@ -59,7 +58,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimap; */ @Category({SecurityTests.class, LargeTests.class}) public class TestTablePermissions { - private static final Log LOG = LogFactory.getLog(TestTablePermissions.class); + private static final Logger LOG = LoggerFactory.getLogger(TestTablePermissions.class); private static final HBaseTestingUtility UTIL = new HBaseTestingUtility(); private static ZKWatcher ZKW; private final static Abortable ABORTABLE = new Abortable() { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestWithDisabledAuthorization.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestWithDisabledAuthorization.java index 2fd3909344b..58e7737518c 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestWithDisabledAuthorization.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestWithDisabledAuthorization.java @@ -17,26 +17,24 @@ */ package org.apache.hadoop.hbase.security.access; -import static org.junit.Assert.*; -import static org.mockito.Mockito.*; +import static org.junit.Assert.assertEquals; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.CompareOperator; import org.apache.hadoop.hbase.Coprocessor; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HColumnDescriptor; +import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.NamespaceDescriptor; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.TableName; -import org.apache.hadoop.hbase.CompareOperator; import org.apache.hadoop.hbase.TableNotFoundException; +import org.apache.hadoop.hbase.TestTableName; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Append; import org.apache.hadoop.hbase.client.Connection; @@ -62,18 +60,14 @@ import org.apache.hadoop.hbase.regionserver.FlushLifeCycleTracker; import org.apache.hadoop.hbase.regionserver.HRegion; import org.apache.hadoop.hbase.regionserver.MiniBatchOperationInProgress; import org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost; -import org.apache.hadoop.hbase.regionserver.RegionScanner; import org.apache.hadoop.hbase.regionserver.RegionServerCoprocessorHost; -import org.apache.hadoop.hbase.wal.WALEdit; import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.security.access.Permission.Action; import org.apache.hadoop.hbase.testclassification.LargeTests; import org.apache.hadoop.hbase.testclassification.SecurityTests; import 
org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Pair; -import org.apache.hadoop.hbase.TestTableName; -import org.apache.log4j.Level; -import org.apache.log4j.Logger; +import org.apache.hadoop.hbase.wal.WALEdit; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; @@ -81,19 +75,14 @@ import org.junit.BeforeClass; import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; @Category({SecurityTests.class, LargeTests.class}) public class TestWithDisabledAuthorization extends SecureTestUtil { - private static final Log LOG = LogFactory.getLog(TestWithDisabledAuthorization.class); - - static { - Logger.getLogger(AccessController.class).setLevel(Level.TRACE); - Logger.getLogger(AccessControlFilter.class).setLevel(Level.TRACE); - Logger.getLogger(TableAuthManager.class).setLevel(Level.TRACE); - } - + private static final Logger LOG = LoggerFactory.getLogger(TestWithDisabledAuthorization.class); private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private static final byte[] TEST_FAMILY = Bytes.toBytes("f1"); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestZKPermissionWatcher.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestZKPermissionWatcher.java index 18fb15fd522..1bb86f4f5e6 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestZKPermissionWatcher.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestZKPermissionWatcher.java @@ -24,8 +24,6 @@ import java.util.ArrayList; import java.util.List; import java.util.concurrent.atomic.AtomicBoolean; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Abortable; import org.apache.hadoop.hbase.TableName; @@ -39,13 +37,15 @@ import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Test the reading and writing of access permissions to and from zookeeper. 
*/ @Category({SecurityTests.class, LargeTests.class}) public class TestZKPermissionWatcher { - private static final Log LOG = LogFactory.getLog(TestZKPermissionWatcher.class); + private static final Logger LOG = LoggerFactory.getLogger(TestZKPermissionWatcher.class); private static final HBaseTestingUtility UTIL = new HBaseTestingUtility(); private static TableAuthManager AUTH_A; private static TableAuthManager AUTH_B; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java index d61f98e3c6a..feffdee42ec 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java @@ -32,8 +32,6 @@ import java.util.Collection; import java.util.List; import java.util.concurrent.ConcurrentMap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.hbase.ChoreService; @@ -57,6 +55,7 @@ import org.apache.hadoop.hbase.ipc.RpcServerFactory; import org.apache.hadoop.hbase.ipc.RpcServerInterface; import org.apache.hadoop.hbase.ipc.ServerRpcController; import org.apache.hadoop.hbase.ipc.SimpleRpcServer; +import org.apache.hadoop.hbase.log.HBaseMarkers; import org.apache.hadoop.hbase.metrics.MetricRegistry; import org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos; import org.apache.hadoop.hbase.regionserver.HRegion; @@ -64,9 +63,6 @@ import org.apache.hadoop.hbase.regionserver.OnlineRegions; import org.apache.hadoop.hbase.regionserver.RegionServerServices; import org.apache.hadoop.hbase.security.SecurityInfo; import org.apache.hadoop.hbase.security.User; -import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor; -import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.ServiceDescriptor; -import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.testclassification.SecurityTests; import org.apache.hadoop.hbase.util.Bytes; @@ -92,6 +88,12 @@ import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import org.junit.runners.Parameterized.Parameter; import org.junit.runners.Parameterized.Parameters; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.ServiceDescriptor; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message; import com.google.protobuf.BlockingService; import com.google.protobuf.RpcController; @@ -114,7 +116,7 @@ public class TestTokenAuthentication { System.setProperty("java.security.krb5.realm", "hbase"); System.setProperty("java.security.krb5.kdc", "blah"); } - private static final Log LOG = LogFactory.getLog(TestTokenAuthentication.class); + private static final Logger LOG = LoggerFactory.getLogger(TestTokenAuthentication.class); public interface AuthenticationServiceSecurityInfo {} @@ -123,7 +125,7 @@ public class TestTokenAuthentication { */ private static class TokenServer extends TokenProvider implements AuthenticationProtos.AuthenticationService.BlockingInterface, Runnable, Server { - private static final Log LOG = 
LogFactory.getLog(TokenServer.class); + private static final Logger LOG = LoggerFactory.getLogger(TokenServer.class); private Configuration conf; private HBaseTestingUtility TEST_UTIL; private RpcServerInterface rpcServer; @@ -252,7 +254,7 @@ public class TestTokenAuthentication { @Override public void abort(String reason, Throwable error) { - LOG.fatal("Aborting on: "+reason, error); + LOG.error(HBaseMarkers.FATAL, "Aborting on: "+reason, error); this.aborted = true; this.stopped = true; sleeper.skipSleepCycle(); @@ -339,6 +341,7 @@ public class TestTokenAuthentication { started = true; } + @Override public void run() { try { initialize(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestZKSecretWatcher.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestZKSecretWatcher.java index ea0733874d7..1e5ea533c92 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestZKSecretWatcher.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestZKSecretWatcher.java @@ -26,8 +26,6 @@ import static org.junit.Assert.assertTrue; import java.util.concurrent.CountDownLatch; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Abortable; import org.apache.hadoop.hbase.HBaseConfiguration; @@ -40,6 +38,8 @@ import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Test the synchronization of token authentication master keys through @@ -47,7 +47,7 @@ import org.junit.experimental.categories.Category; */ @Category({SecurityTests.class, LargeTests.class}) public class TestZKSecretWatcher { - private static final Log LOG = LogFactory.getLog(TestZKSecretWatcher.class); + private static final Logger LOG = LoggerFactory.getLogger(TestZKSecretWatcher.class); private static HBaseTestingUtility TEST_UTIL; private static AuthenticationTokenSecretManager KEY_MASTER; private static AuthenticationTokenSecretManagerForTest KEY_SLAVE; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestZKSecretWatcherRefreshKeys.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestZKSecretWatcherRefreshKeys.java index 1757ddd0b17..391a8447859 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestZKSecretWatcherRefreshKeys.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestZKSecretWatcherRefreshKeys.java @@ -18,8 +18,6 @@ package org.apache.hadoop.hbase.security.token; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Abortable; import org.apache.hadoop.hbase.HBaseConfiguration; @@ -35,13 +33,15 @@ import org.junit.Assert; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Test the refreshKeys in ZKSecretWatcher */ @Category({ SecurityTests.class, SmallTests.class }) public class TestZKSecretWatcherRefreshKeys { - private static final Log LOG = LogFactory.getLog(TestZKSecretWatcherRefreshKeys.class); + private static final Logger LOG = LoggerFactory.getLogger(TestZKSecretWatcherRefreshKeys.class); private static HBaseTestingUtility TEST_UTIL; private 
static class MockAbortable implements Abortable { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/ExpAsStringVisibilityLabelServiceImpl.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/ExpAsStringVisibilityLabelServiceImpl.java index 29ddfce402b..9d60e1088a4 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/ExpAsStringVisibilityLabelServiceImpl.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/ExpAsStringVisibilityLabelServiceImpl.java @@ -32,8 +32,6 @@ import java.util.Iterator; import java.util.List; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.AuthUtil; @@ -65,6 +63,8 @@ import org.apache.hadoop.hbase.security.visibility.expression.Operator; import org.apache.hadoop.hbase.util.ByteBufferUtils; import org.apache.hadoop.hbase.util.Bytes; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This is a VisibilityLabelService where labels in Mutation's visibility @@ -74,8 +74,8 @@ import org.apache.yetus.audience.InterfaceAudience; */ @InterfaceAudience.Private public class ExpAsStringVisibilityLabelServiceImpl implements VisibilityLabelService { - - private static final Log LOG = LogFactory.getLog(ExpAsStringVisibilityLabelServiceImpl.class); + private static final Logger LOG = + LoggerFactory.getLogger(ExpAsStringVisibilityLabelServiceImpl.class); private static final byte[] DUMMY_VALUE = new byte[0]; private static final byte STRING_SERIALIZATION_FORMAT = 2; @@ -284,7 +284,7 @@ public class ExpAsStringVisibilityLabelServiceImpl implements VisibilityLabelSer authLabels = (authLabels == null) ? 
new ArrayList<>() : authLabels; authorizations = new Authorizations(authLabels); } catch (Throwable t) { - LOG.error(t); + LOG.error(t.toString(), t); throw new IOException(t); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelReplicationWithExpAsString.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelReplicationWithExpAsString.java index 69040811bda..521cafe7ec8 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelReplicationWithExpAsString.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelReplicationWithExpAsString.java @@ -25,8 +25,6 @@ import static org.junit.Assert.assertTrue; import java.io.IOException; import java.security.PrivilegedExceptionAction; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellScanner; @@ -54,11 +52,13 @@ import org.apache.hadoop.hbase.zookeeper.MiniZooKeeperCluster; import org.apache.hadoop.hbase.zookeeper.ZKWatcher; import org.junit.Before; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({ SecurityTests.class, MediumTests.class }) public class TestVisibilityLabelReplicationWithExpAsString extends TestVisibilityLabelsReplication { - private static final Log LOG = LogFactory - .getLog(TestVisibilityLabelReplicationWithExpAsString.class); + private static final Logger LOG = LoggerFactory + .getLogger(TestVisibilityLabelReplicationWithExpAsString.class); @Override @Before diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsReplication.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsReplication.java index dfa0e739970..58be8f98807 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsReplication.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsReplication.java @@ -29,8 +29,6 @@ import java.util.List; import java.util.Optional; import java.util.concurrent.atomic.AtomicInteger; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.Cell; @@ -81,10 +79,12 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({ SecurityTests.class, MediumTests.class }) public class TestVisibilityLabelsReplication { - private static final Log LOG = LogFactory.getLog(TestVisibilityLabelsReplication.class); + private static final Logger LOG = LoggerFactory.getLogger(TestVisibilityLabelsReplication.class); protected static final int NON_VIS_TAG_TYPE = 100; protected static final String TEMP = "temp"; protected static Configuration conf; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithDefaultVisLabelService.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithDefaultVisLabelService.java index ab3440cdaab..189b37f9357 100644 --- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithDefaultVisLabelService.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithDefaultVisLabelService.java @@ -29,8 +29,6 @@ import java.security.PrivilegedExceptionAction; import java.util.List; import java.util.concurrent.atomic.AtomicBoolean; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; @@ -53,12 +51,14 @@ import org.junit.Assert; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import com.google.protobuf.ByteString; @Category({SecurityTests.class, MediumTests.class}) public class TestVisibilityLabelsWithDefaultVisLabelService extends TestVisibilityLabels { - private static final Log LOG = LogFactory.getLog( + private static final Logger LOG = LoggerFactory.getLogger( TestVisibilityLabelsWithDefaultVisLabelService.class); @BeforeClass diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithDeletes.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithDeletes.java index de8fa23bf76..0a7d918fd45 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithDeletes.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithDeletes.java @@ -17,8 +17,6 @@ */ package org.apache.hadoop.hbase.security.visibility; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellScanner; @@ -56,6 +54,8 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.io.IOException; import java.io.InterruptedIOException; @@ -73,7 +73,7 @@ import static org.junit.Assert.assertTrue; */ @Category({SecurityTests.class, MediumTests.class}) public class TestVisibilityLabelsWithDeletes { - private static final Log LOG = LogFactory.getLog(TestVisibilityLabelsWithDeletes.class); + private static final Logger LOG = LoggerFactory.getLogger(TestVisibilityLabelsWithDeletes.class); private static final String TOPSECRET = "TOPSECRET"; private static final String PUBLIC = "PUBLIC"; private static final String PRIVATE = "PRIVATE"; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestWithDisabledAuthorization.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestWithDisabledAuthorization.java index 3d53a1e6371..43ba304102d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestWithDisabledAuthorization.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestWithDisabledAuthorization.java @@ -18,7 +18,9 @@ package org.apache.hadoop.hbase.security.visibility; import static org.apache.hadoop.hbase.security.visibility.VisibilityConstants.LABELS_TABLE_NAME; -import static org.junit.Assert.*; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; +import 
static org.junit.Assert.fail; import java.security.PrivilegedExceptionAction; import java.util.ArrayList; @@ -54,7 +56,7 @@ import com.google.protobuf.ByteString; public class TestWithDisabledAuthorization { private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); - + private static final String CONFIDENTIAL = "confidential"; private static final String SECRET = "secret"; private static final String PRIVATE = "private"; @@ -63,7 +65,7 @@ public class TestWithDisabledAuthorization { private static final byte[] ZERO = Bytes.toBytes(0L); - @Rule + @Rule public final TestName TEST_NAME = new TestName(); private static User SUPERUSER; @@ -95,6 +97,7 @@ public class TestWithDisabledAuthorization { // Define test labels SUPERUSER.runAs(new PrivilegedExceptionAction() { + @Override public Void run() throws Exception { try (Connection conn = ConnectionFactory.createConnection(conf)) { VisibilityClient.addLabels(conn, @@ -103,7 +106,7 @@ public class TestWithDisabledAuthorization { new String[] { SECRET, CONFIDENTIAL }, USER_RW.getShortName()); } catch (Throwable t) { - fail("Should not have failed"); + fail("Should not have failed"); } return null; } @@ -120,13 +123,14 @@ public class TestWithDisabledAuthorization { // Even though authorization is disabled, we should be able to manage user auths SUPERUSER.runAs(new PrivilegedExceptionAction() { + @Override public Void run() throws Exception { try (Connection conn = ConnectionFactory.createConnection(conf)) { VisibilityClient.setAuths(conn, new String[] { SECRET, CONFIDENTIAL }, USER_RW.getShortName()); } catch (Throwable t) { - fail("Should not have failed"); + fail("Should not have failed"); } return null; } @@ -134,6 +138,7 @@ public class TestWithDisabledAuthorization { PrivilegedExceptionAction> getAuths = new PrivilegedExceptionAction>() { + @Override public List run() throws Exception { GetAuthsResponse authsResponse = null; try (Connection conn = ConnectionFactory.createConnection(conf)) { @@ -156,13 +161,14 @@ public class TestWithDisabledAuthorization { assertTrue(authsList.contains(CONFIDENTIAL)); SUPERUSER.runAs(new PrivilegedExceptionAction() { + @Override public Void run() throws Exception { try (Connection conn = ConnectionFactory.createConnection(conf)) { VisibilityClient.clearAuths(conn, new String[] { SECRET }, USER_RW.getShortName()); } catch (Throwable t) { - fail("Should not have failed"); + fail("Should not have failed"); } return null; } @@ -173,13 +179,14 @@ public class TestWithDisabledAuthorization { assertTrue(authsList.contains(CONFIDENTIAL)); SUPERUSER.runAs(new PrivilegedExceptionAction() { + @Override public Void run() throws Exception { try (Connection conn = ConnectionFactory.createConnection(conf)) { VisibilityClient.clearAuths(conn, new String[] { CONFIDENTIAL }, USER_RW.getShortName()); } catch (Throwable t) { - fail("Should not have failed"); + fail("Should not have failed"); } return null; } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java index 64a60c2a63a..06de7b1d719 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java @@ -30,8 +30,6 @@ import java.util.Map; import java.util.Set; import java.util.TreeSet; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import 
org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; @@ -72,7 +70,8 @@ import org.apache.hadoop.hbase.util.FSVisitor; import org.apache.hadoop.hbase.util.MD5Hash; import org.apache.yetus.audience.InterfaceAudience; import org.junit.Assert; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneRequest; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneResponse; @@ -86,7 +85,7 @@ import com.google.protobuf.ServiceException; */ @InterfaceAudience.Private public final class SnapshotTestingUtils { - private static final Log LOG = LogFactory.getLog(SnapshotTestingUtils.class); + private static final Logger LOG = LoggerFactory.getLogger(SnapshotTestingUtils.class); // default number of regions (and keys) given by getSplitKeys() and createTable() private static byte[] KEYS = Bytes.toBytes("0123456"); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestFlushSnapshotFromClient.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestFlushSnapshotFromClient.java index 28821201201..a2c015c6b53 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestFlushSnapshotFromClient.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestFlushSnapshotFromClient.java @@ -29,8 +29,6 @@ import java.util.List; import java.util.Map; import java.util.concurrent.CountDownLatch; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -61,6 +59,8 @@ import org.junit.ClassRule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestRule; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Test creating/using/deleting snapshots from the client @@ -72,7 +72,7 @@ import org.junit.rules.TestRule; */ @Category({RegionServerTests.class, LargeTests.class}) public class TestFlushSnapshotFromClient { - private static final Log LOG = LogFactory.getLog(TestFlushSnapshotFromClient.class); + private static final Logger LOG = LoggerFactory.getLogger(TestFlushSnapshotFromClient.class); @ClassRule public static final TestRule timeout = CategoryBasedTimeout.forClass(TestFlushSnapshotFromClient.class); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestMobFlushSnapshotFromClient.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestMobFlushSnapshotFromClient.java index bba293745c1..44c14aa0aef 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestMobFlushSnapshotFromClient.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestMobFlushSnapshotFromClient.java @@ -19,8 +19,6 @@ package org.apache.hadoop.hbase.snapshot; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.CategoryBasedTimeout; import org.apache.hadoop.hbase.HBaseTestingUtility; @@ -33,6 +31,8 @@ import org.junit.BeforeClass; import org.junit.ClassRule; import org.junit.experimental.categories.Category; import org.junit.rules.TestRule; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Test 
creating/using/deleting snapshots from the client @@ -44,7 +44,7 @@ import org.junit.rules.TestRule; */ @Category({ClientTests.class, LargeTests.class}) public class TestMobFlushSnapshotFromClient extends TestFlushSnapshotFromClient { - private static final Log LOG = LogFactory.getLog(TestFlushSnapshotFromClient.class); + private static final Logger LOG = LoggerFactory.getLogger(TestFlushSnapshotFromClient.class); @ClassRule public static final TestRule timeout = diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestMobRestoreFlushSnapshotFromClient.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestMobRestoreFlushSnapshotFromClient.java index 2a4ddde7828..e86115065d6 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestMobRestoreFlushSnapshotFromClient.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestMobRestoreFlushSnapshotFromClient.java @@ -19,8 +19,6 @@ package org.apache.hadoop.hbase.snapshot; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.TableName; @@ -30,6 +28,8 @@ import org.apache.hadoop.hbase.testclassification.ClientTests; import org.apache.hadoop.hbase.testclassification.LargeTests; import org.junit.BeforeClass; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Test clone/restore snapshots from the client @@ -39,7 +39,7 @@ import org.junit.experimental.categories.Category; */ @Category({ClientTests.class,LargeTests.class}) public class TestMobRestoreFlushSnapshotFromClient extends TestRestoreFlushSnapshotFromClient { - final Log LOG = LogFactory.getLog(getClass()); + final Logger LOG = LoggerFactory.getLogger(getClass()); @BeforeClass public static void setupCluster() throws Exception { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestMobRestoreSnapshotHelper.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestMobRestoreSnapshotHelper.java index 9126d8d3b86..f4e9870dde7 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestMobRestoreSnapshotHelper.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestMobRestoreSnapshotHelper.java @@ -19,20 +19,20 @@ package org.apache.hadoop.hbase.snapshot; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.mob.MobConstants; import org.apache.hadoop.hbase.snapshot.MobSnapshotTestingUtils.SnapshotMock; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Test the restore/clone operation from a file-system point of view. 
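The change repeated throughout this patch is mechanical: each commons-logging Log/LogFactory pair becomes an slf4j Logger obtained from LoggerFactory, keyed either to the declaring class or, as in the Mob test subclasses above, to getClass(). A minimal sketch of the before/after shape, using a hypothetical ExampleTest class rather than any file touched here:

    // Before (the API this patch removes):
    //   import org.apache.commons.logging.Log;
    //   import org.apache.commons.logging.LogFactory;
    //   private static final Log LOG = LogFactory.getLog(ExampleTest.class);
    //
    // After (the slf4j form adopted here):
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class ExampleTest {
      // Logger named after the declaring class, mirroring the converted tests.
      private static final Logger LOG = LoggerFactory.getLogger(ExampleTest.class);

      void demo() {
        LOG.info("logging through slf4j instead of commons-logging");
      }
    }
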
*/ @Category(SmallTests.class) public class TestMobRestoreSnapshotHelper extends TestRestoreSnapshotHelper { - final Log LOG = LogFactory.getLog(getClass()); + final Logger LOG = LoggerFactory.getLogger(getClass()); @Override protected void setupConf(Configuration conf) { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRegionSnapshotTask.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRegionSnapshotTask.java index df8fc644865..380beba3318 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRegionSnapshotTask.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRegionSnapshotTask.java @@ -17,8 +17,6 @@ */ package org.apache.hadoop.hbase.snapshot; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -42,6 +40,8 @@ import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.io.IOException; import java.util.List; @@ -61,7 +61,7 @@ import static org.mockito.Mockito.spy; */ @Category({MediumTests.class, RegionServerTests.class}) public class TestRegionSnapshotTask { - private final Log LOG = LogFactory.getLog(getClass()); + private final Logger LOG = LoggerFactory.getLogger(getClass()); private static HBaseTestingUtility TEST_UTIL; private static Configuration conf; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRestoreFlushSnapshotFromClient.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRestoreFlushSnapshotFromClient.java index 6bc33145bb2..160f5099e08 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRestoreFlushSnapshotFromClient.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRestoreFlushSnapshotFromClient.java @@ -19,8 +19,6 @@ package org.apache.hadoop.hbase.snapshot; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; @@ -39,6 +37,8 @@ import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Test clone/restore snapshots from the client @@ -48,7 +48,8 @@ import org.junit.experimental.categories.Category; */ @Category({RegionServerTests.class, LargeTests.class}) public class TestRestoreFlushSnapshotFromClient { - private static final Log LOG = LogFactory.getLog(TestRestoreFlushSnapshotFromClient.class); + private static final Logger LOG = + LoggerFactory.getLogger(TestRestoreFlushSnapshotFromClient.class); protected final static HBaseTestingUtility UTIL = new HBaseTestingUtility(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRestoreSnapshotHelper.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRestoreSnapshotHelper.java index b7110b24675..02bdae1a74b 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRestoreSnapshotHelper.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRestoreSnapshotHelper.java @@ -23,8 +23,6 @@ import static 
org.junit.Assert.assertTrue; import java.io.IOException; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -46,13 +44,15 @@ import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; import org.mockito.Mockito; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Test the restore/clone operation from a file-system point of view. */ @Category({RegionServerTests.class, SmallTests.class}) public class TestRestoreSnapshotHelper { - private static final Log LOG = LogFactory.getLog(TestRestoreSnapshotHelper.class); + private static final Logger LOG = LoggerFactory.getLogger(TestRestoreSnapshotHelper.class); protected final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); protected final static String TEST_HFILE = "abc"; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotClientRetries.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotClientRetries.java index 1c6920d5b59..bb492bad2f6 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotClientRetries.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotClientRetries.java @@ -24,8 +24,6 @@ import java.io.IOException; import java.util.Optional; import java.util.concurrent.atomic.AtomicInteger; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.SnapshotDescription; @@ -42,11 +40,13 @@ import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({ MediumTests.class }) public class TestSnapshotClientRetries { private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); - private static final Log LOG = LogFactory.getLog(TestSnapshotClientRetries.class); + private static final Logger LOG = LoggerFactory.getLogger(TestSnapshotClientRetries.class); @Rule public TestTableName TEST_TABLE = new TestTableName(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotDescriptionUtils.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotDescriptionUtils.java index 038ee8cdb1f..73e0560ef1b 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotDescriptionUtils.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotDescriptionUtils.java @@ -22,8 +22,6 @@ import static org.junit.Assert.fail; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -37,6 +35,8 @@ import org.junit.After; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Test that the {@link SnapshotDescription} helper is helping correctly. 
@@ -66,7 +66,7 @@ public class TestSnapshotDescriptionUtils { EnvironmentEdgeManagerTestHelper.reset(); } - private static final Log LOG = LogFactory.getLog(TestSnapshotDescriptionUtils.class); + private static final Logger LOG = LoggerFactory.getLogger(TestSnapshotDescriptionUtils.class); @Test public void testValidateMissingTableName() throws IOException { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotManifest.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotManifest.java index 8ba4262ecc1..2758c03cca1 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotManifest.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotManifest.java @@ -21,8 +21,6 @@ import static org.junit.Assert.fail; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; @@ -43,10 +41,12 @@ import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({MasterTests.class, SmallTests.class}) public class TestSnapshotManifest { - private final Log LOG = LogFactory.getLog(getClass()); + private final Logger LOG = LoggerFactory.getLogger(getClass()); private static final String TABLE_NAME_STR = "testSnapshotManifest"; private static final TableName TABLE_NAME = TableName.valueOf(TABLE_NAME_STR); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestLoadIncrementalHFilesSplitRecovery.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestLoadIncrementalHFilesSplitRecovery.java index bf43982f7d2..7e1e264ea3a 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestLoadIncrementalHFilesSplitRecovery.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestLoadIncrementalHFilesSplitRecovery.java @@ -32,11 +32,8 @@ import java.util.List; import java.util.Map; import java.util.concurrent.ExecutorService; import java.util.concurrent.atomic.AtomicInteger; -import java.util.regex.Pattern; import java.util.stream.IntStream; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -64,6 +61,7 @@ import org.apache.hadoop.hbase.client.TableDescriptor; import org.apache.hadoop.hbase.client.TableDescriptorBuilder; import org.apache.hadoop.hbase.coprocessor.CoprocessorHost; import org.apache.hadoop.hbase.ipc.RpcControllerFactory; +import org.apache.hadoop.hbase.log.HBaseMarkers; import org.apache.hadoop.hbase.regionserver.HRegionServer; import org.apache.hadoop.hbase.regionserver.TestHRegionServerBulkLoad; import org.apache.hadoop.hbase.testclassification.LargeTests; @@ -78,6 +76,8 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; import org.mockito.Mockito; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Multimap; import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController; import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; @@ -90,7 +90,7 @@ import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHF */ @Category({ MiscTests.class, LargeTests.class }) public class TestLoadIncrementalHFilesSplitRecovery { - private static final Log LOG = LogFactory.getLog(TestHRegionServerBulkLoad.class); + private static final Logger LOG = LoggerFactory.getLogger(TestHRegionServerBulkLoad.class); static HBaseTestingUtility util; // used by secure subclass @@ -289,7 +289,7 @@ public class TestLoadIncrementalHFilesSplitRecovery { errConn = getMockedConnection(util.getConfiguration()); serviceCallable = this.buildClientServiceCallable(errConn, table, first, lqis, true); } catch (Exception e) { - LOG.fatal("mocking cruft, should never happen", e); + LOG.error(HBaseMarkers.FATAL, "mocking cruft, should never happen", e); throw new RuntimeException("mocking cruft, should never happen"); } failedCalls.incrementAndGet(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/BaseTestHBaseFsck.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/BaseTestHBaseFsck.java index d9bef119dbc..fbc0096a5cf 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/BaseTestHBaseFsck.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/BaseTestHBaseFsck.java @@ -34,8 +34,6 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutorService; import java.util.concurrent.ScheduledThreadPoolExecutor; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -77,7 +75,8 @@ import org.apache.hadoop.hbase.util.HBaseFsck.TableInfo; import org.apache.hadoop.hbase.util.hbck.HFileCorruptionChecker; import org.apache.zookeeper.KeeperException; import org.junit.rules.TestName; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos; @@ -92,7 +91,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos; */ public class BaseTestHBaseFsck { static final int POOL_SIZE = 7; - protected static final Log LOG = LogFactory.getLog(BaseTestHBaseFsck.class); + protected static final Logger LOG = LoggerFactory.getLogger(BaseTestHBaseFsck.class); protected final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); protected final static Configuration conf = TEST_UTIL.getConfiguration(); protected final static String FAM_STR = "fam"; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/HFileArchiveTestingUtil.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/HFileArchiveTestingUtil.java index ad9d2174699..22a99a35df8 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/HFileArchiveTestingUtil.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/HFileArchiveTestingUtil.java @@ -26,8 +26,6 @@ import java.util.Arrays; import java.util.Collections; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -35,13 +33,15 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.regionserver.HRegion; import org.apache.hadoop.hbase.regionserver.Store; +import org.slf4j.Logger; +import 
org.slf4j.LoggerFactory; /** * Test helper for testing archiving of HFiles */ public class HFileArchiveTestingUtil { - private static final Log LOG = LogFactory.getLog(HFileArchiveTestingUtil.class); + private static final Logger LOG = LoggerFactory.getLogger(HFileArchiveTestingUtil.class); private HFileArchiveTestingUtil() { // NOOP private ctor since this is just a utility class diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MockServer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MockServer.java index dade11e78b8..551b940cf6f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MockServer.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MockServer.java @@ -19,8 +19,6 @@ package org.apache.hadoop.hbase.util; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.hbase.ChoreService; @@ -31,14 +29,17 @@ import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.ZooKeeperConnectionException; import org.apache.hadoop.hbase.client.ClusterConnection; import org.apache.hadoop.hbase.client.Connection; +import org.apache.hadoop.hbase.log.HBaseMarkers; import org.apache.hadoop.hbase.zookeeper.MetaTableLocator; import org.apache.hadoop.hbase.zookeeper.ZKWatcher; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Basic mock Server for handler tests. */ public class MockServer implements Server { - private static final Log LOG = LogFactory.getLog(MockServer.class); + private static final Logger LOG = LoggerFactory.getLogger(MockServer.class); final static ServerName NAME = ServerName.valueOf("MockServer", 123, -1); boolean stopped; @@ -46,7 +47,6 @@ public class MockServer implements Server { final ZKWatcher zk; final HBaseTestingUtility htu; - @SuppressWarnings("unused") public MockServer() throws ZooKeeperConnectionException, IOException { // Shutdown default constructor by making it private. this(null); @@ -73,7 +73,7 @@ public class MockServer implements Server { @Override public void abort(String why, Throwable e) { - LOG.fatal("Abort why=" + why, e); + LOG.error(HBaseMarkers.FATAL, "Abort why=" + why, e); stop(why); this.aborted = true; } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedAction.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedAction.java index 6bfb23ea355..d1982194d90 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedAction.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedAction.java @@ -31,8 +31,6 @@ import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; @@ -45,13 +43,15 @@ import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType; import org.apache.hadoop.hbase.util.test.LoadTestDataGenerator; import org.apache.hadoop.util.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Common base class for reader and writer parts of multi-thread HBase load * test (See LoadTestTool). 
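A second recurring adjustment shows up in the TestLoadIncrementalHFilesSplitRecovery and MockServer hunks above: the slf4j Logger interface has no fatal() method, so LOG.fatal(msg, e) becomes LOG.error(HBaseMarkers.FATAL, msg, e), carrying the severity as a marker. The patch only shows that HBaseMarkers.FATAL can be passed where org.slf4j.Marker is expected; the sketch below builds a stand-in marker with MarkerFactory to illustrate the call shape and is not the actual HBaseMarkers implementation:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;
    import org.slf4j.Marker;
    import org.slf4j.MarkerFactory;

    public class FatalMarkerSketch {
      private static final Logger LOG = LoggerFactory.getLogger(FatalMarkerSketch.class);

      // Stand-in for org.apache.hadoop.hbase.log.HBaseMarkers.FATAL (assumed, not shown in this patch).
      private static final Marker FATAL = MarkerFactory.getMarker("FATAL");

      public void abort(String why, Throwable e) {
        // slf4j has no fatal level, so the marker conveys it on an error() call.
        LOG.error(FATAL, "Abort why=" + why, e);
      }
    }
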
*/ public abstract class MultiThreadedAction { - private static final Log LOG = LogFactory.getLog(MultiThreadedAction.class); + private static final Logger LOG = LoggerFactory.getLogger(MultiThreadedAction.class); protected final TableName tableName; protected final Configuration conf; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedReader.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedReader.java index 69256b79b07..447cca870b7 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedReader.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedReader.java @@ -23,23 +23,22 @@ import java.util.Set; import java.util.concurrent.atomic.AtomicLong; import org.apache.commons.lang3.RandomUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HRegionLocation; import org.apache.hadoop.hbase.TableName; -import org.apache.hadoop.hbase.client.ClusterConnection; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.Consistency; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.util.test.LoadTestDataGenerator; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** Creates multiple threads that read and verify previously written data */ public class MultiThreadedReader extends MultiThreadedAction { - private static final Log LOG = LogFactory.getLog(MultiThreadedReader.class); + private static final Logger LOG = LoggerFactory.getLogger(MultiThreadedReader.class); protected Set readers = new HashSet<>(); private final double verifyPercent; @@ -286,7 +285,7 @@ public class MultiThreadedReader extends MultiThreadedAction + ", time from start: " + (System.currentTimeMillis() - startTimeMs) + " ms"); if (printExceptionTrace) { - LOG.warn(e); + LOG.warn(e.toString(), e); printExceptionTrace = false; } } @@ -302,7 +301,7 @@ public class MultiThreadedReader extends MultiThreadedAction + (System.currentTimeMillis() - startTimeMs) + " ms"); } if (printExceptionTrace) { - LOG.warn(e); + LOG.warn(e.toString(), e); printExceptionTrace = false; } } @@ -379,7 +378,7 @@ public class MultiThreadedReader extends MultiThreadedAction numKeysVerified.incrementAndGet(); } } else { - HRegionLocation hloc = ((ClusterConnection) connection).getRegionLocation(tableName, + HRegionLocation hloc = connection.getRegionLocation(tableName, get.getRow(), false); String rowKey = Bytes.toString(get.getRow()); LOG.info("Key = " + rowKey + ", Region location: " + hloc); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedReaderWithACL.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedReaderWithACL.java index e9511753721..86a8500bcbd 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedReaderWithACL.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedReaderWithACL.java @@ -21,8 +21,6 @@ import java.security.PrivilegedExceptionAction; import java.util.HashMap; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Get; @@ -32,12 +30,14 @@ import org.apache.hadoop.hbase.security.HBaseKerberosUtils; import 
org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.util.test.LoadTestDataGenerator; import org.apache.hadoop.security.UserGroupInformation; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A MultiThreadReader that helps to work with ACL */ public class MultiThreadedReaderWithACL extends MultiThreadedReader { - private static final Log LOG = LogFactory.getLog(MultiThreadedReaderWithACL.class); + private static final Logger LOG = LoggerFactory.getLogger(MultiThreadedReaderWithACL.class); private static final String COMMA = ","; /** diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdater.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdater.java index acc1c5ed10b..768f961fd7a 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdater.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdater.java @@ -30,8 +30,6 @@ import java.util.Map; import java.util.Set; import org.apache.commons.lang3.RandomUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; @@ -49,12 +47,13 @@ import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType; import org.apache.hadoop.hbase.util.test.LoadTestDataGenerator; import org.apache.hadoop.util.StringUtils; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; /** Creates multiple threads that write key/values into the */ public class MultiThreadedUpdater extends MultiThreadedWriterBase { - private static final Log LOG = LogFactory.getLog(MultiThreadedUpdater.class); + private static final Logger LOG = LoggerFactory.getLogger(MultiThreadedUpdater.class); protected Set updaters = new HashSet<>(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdaterWithACL.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdaterWithACL.java index 663e9875bb4..c3258b466e3 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdaterWithACL.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdaterWithACL.java @@ -24,8 +24,6 @@ import java.security.PrivilegedExceptionAction; import java.util.HashMap; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Append; @@ -42,12 +40,14 @@ import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.util.test.LoadTestDataGenerator; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.util.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A MultiThreadUpdater that helps to work with ACL */ public class MultiThreadedUpdaterWithACL extends MultiThreadedUpdater { - private static final Log LOG = LogFactory.getLog(MultiThreadedUpdaterWithACL.class); + private static final Logger LOG = LoggerFactory.getLogger(MultiThreadedUpdaterWithACL.class); private final static String COMMA= ","; private User userOwner; /** diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriter.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriter.java index 07e9cc8ae3c..bcd24d57b70 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriter.java @@ -28,8 +28,6 @@ import java.util.Arrays; import java.util.HashSet; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.TableName; @@ -38,10 +36,12 @@ import org.apache.hadoop.hbase.client.RetriesExhaustedWithDetailsException; import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.util.test.LoadTestDataGenerator; import org.apache.hadoop.util.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** Creates multiple threads that write key/values into the */ public class MultiThreadedWriter extends MultiThreadedWriterBase { - private static final Log LOG = LogFactory.getLog(MultiThreadedWriter.class); + private static final Logger LOG = LoggerFactory.getLogger(MultiThreadedWriter.class); protected Set writers = new HashSet<>(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriterBase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriterBase.java index fbf745ffb04..54be0d3f88c 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriterBase.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriterBase.java @@ -28,17 +28,17 @@ import java.util.concurrent.ConcurrentSkipListSet; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HRegionLocation; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.util.test.LoadTestDataGenerator; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** Creates multiple threads that write key/values into the */ public abstract class MultiThreadedWriterBase extends MultiThreadedAction { - private static final Log LOG = LogFactory.getLog(MultiThreadedWriterBase.class); + private static final Logger LOG = LoggerFactory.getLogger(MultiThreadedWriterBase.class); /** * A temporary place to keep track of inserted/updated keys. 
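The exception-only logging calls are the other pattern worth noting: commons-logging accepted a bare Throwable, as in LOG.warn(e) in MultiThreadedReader above or LOG.error(ex) and LOG.info(e) further below, while the slf4j overloads expect a message String optionally followed by the Throwable. The converted call sites therefore pass e.toString() as the message and keep the exception for the stack trace. A small self-contained illustration (ExampleIdiom and the simulated IOException are not from the patch):

    import java.io.IOException;

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class ExampleIdiom {
      private static final Logger LOG = LoggerFactory.getLogger(ExampleIdiom.class);

      void run() {
        try {
          throw new IOException("simulated failure");
        } catch (IOException e) {
          // commons-logging allowed LOG.warn(e); slf4j wants an explicit message,
          // so the exception text becomes the message and the Throwable keeps the trace.
          LOG.warn(e.toString(), e);
        }
      }
    }
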
This is written to by diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriterWithACL.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriterWithACL.java index 48062882f27..7d7f5971da9 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriterWithACL.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriterWithACL.java @@ -22,8 +22,6 @@ import java.io.PrintWriter; import java.io.StringWriter; import java.security.PrivilegedExceptionAction; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Put; @@ -32,13 +30,15 @@ import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.util.test.LoadTestDataGenerator; import org.apache.hadoop.util.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * MultiThreadedWriter that helps in testing ACL */ public class MultiThreadedWriterWithACL extends MultiThreadedWriter { - private static final Log LOG = LogFactory.getLog(MultiThreadedWriterWithACL.class); + private static final Logger LOG = LoggerFactory.getLogger(MultiThreadedWriterWithACL.class); private User userOwner; public MultiThreadedWriterWithACL(LoadTestDataGenerator dataGen, Configuration conf, diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/ProcessBasedLocalHBaseCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/ProcessBasedLocalHBaseCluster.java index a5cf0bd4a65..6415fdb62e6 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/ProcessBasedLocalHBaseCluster.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/ProcessBasedLocalHBaseCluster.java @@ -39,8 +39,6 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; import org.apache.commons.io.FileUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; @@ -51,6 +49,8 @@ import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.zookeeper.ZKUtil; import org.apache.hadoop.hdfs.MiniDFSCluster; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A helper class for process-based mini-cluster tests. 
Unlike @@ -69,7 +69,7 @@ public class ProcessBasedLocalHBaseCluster { private static final int MAX_FILE_SIZE_OVERRIDE = 10 * 1000 * 1000; - private static final Log LOG = LogFactory.getLog( + private static final Logger LOG = LoggerFactory.getLogger( ProcessBasedLocalHBaseCluster.class); private List daemonPidFiles = @@ -461,7 +461,7 @@ public class ProcessBasedLocalHBaseCluster { try { runInternal(); } catch (IOException ex) { - LOG.error(ex); + LOG.error(ex.toString(), ex); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/RestartMetaTest.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/RestartMetaTest.java index 7972855d0c8..d78e34af114 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/RestartMetaTest.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/RestartMetaTest.java @@ -19,8 +19,6 @@ package org.apache.hadoop.hbase.util; import java.io.IOException; import org.apache.commons.cli.CommandLine; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.TableName; @@ -33,6 +31,8 @@ import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.io.compress.Compression; import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; import org.apache.hadoop.hbase.util.test.LoadTestDataGenerator; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A command-line tool that spins up a local process-based cluster, loads @@ -41,7 +41,7 @@ import org.apache.hadoop.hbase.util.test.LoadTestDataGenerator; */ public class RestartMetaTest extends AbstractHBaseTool { - private static final Log LOG = LogFactory.getLog(RestartMetaTest.class); + private static final Logger LOG = LoggerFactory.getLogger(RestartMetaTest.class); /** The number of region servers used if not specified */ private static final int DEFAULT_NUM_RS = 2; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestCompressionTest.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestCompressionTest.java index 395c04d599a..a90a47fc75f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestCompressionTest.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestCompressionTest.java @@ -19,8 +19,6 @@ package org.apache.hadoop.hbase.util; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; @@ -32,6 +30,8 @@ import org.apache.hadoop.util.NativeCodeLoader; import org.apache.hadoop.util.ReflectionUtils; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.io.BufferedOutputStream; import java.io.DataOutputStream; @@ -41,7 +41,7 @@ import static org.junit.Assert.*; @Category({MiscTests.class, SmallTests.class}) public class TestCompressionTest { - private static final Log LOG = LogFactory.getLog(TestCompressionTest.class); + private static final Logger LOG = LoggerFactory.getLogger(TestCompressionTest.class); @Test public void testExceptionCaching() { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSHDFSUtils.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSHDFSUtils.java index 5899971838f..8f503e034b1 
100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSHDFSUtils.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSHDFSUtils.java @@ -21,8 +21,6 @@ import static org.junit.Assert.assertTrue; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -35,13 +33,15 @@ import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; import org.mockito.Mockito; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Test our recoverLease loop against mocked up filesystem. */ @Category({MiscTests.class, MediumTests.class}) public class TestFSHDFSUtils { - private static final Log LOG = LogFactory.getLog(TestFSHDFSUtils.class); + private static final Logger LOG = LoggerFactory.getLogger(TestFSHDFSUtils.class); private static final HBaseTestingUtility HTU = new HBaseTestingUtility(); static { Configuration conf = HTU.getConfiguration(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSTableDescriptors.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSTableDescriptors.java index 30a7cd69a6a..2c27a9313ec 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSTableDescriptors.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSTableDescriptors.java @@ -32,8 +32,6 @@ import java.util.Map; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -53,6 +51,8 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Tests for {@link FSTableDescriptors}. @@ -61,7 +61,7 @@ import org.junit.rules.TestName; @Category({MiscTests.class, MediumTests.class}) public class TestFSTableDescriptors { private static final HBaseTestingUtility UTIL = new HBaseTestingUtility(); - private static final Log LOG = LogFactory.getLog(TestFSTableDescriptors.class); + private static final Logger LOG = LoggerFactory.getLogger(TestFSTableDescriptors.class); @Rule public TestName name = new TestName(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSUtils.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSUtils.java index 055c28d38b1..3d1f10f20da 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSUtils.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSUtils.java @@ -30,8 +30,6 @@ import java.io.IOException; import java.util.Random; import java.util.UUID; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; @@ -52,13 +50,15 @@ import org.apache.hadoop.hdfs.MiniDFSCluster; import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Test {@link FSUtils}. 
*/ @Category({MiscTests.class, MediumTests.class}) public class TestFSUtils { - private static final Log LOG = LogFactory.getLog(TestFSUtils.class); + private static final Logger LOG = LoggerFactory.getLogger(TestFSUtils.class); private HBaseTestingUtility htu; private FileSystem fs; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSVisitor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSVisitor.java index e455b0ad270..cb23a0b3504 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSVisitor.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSVisitor.java @@ -25,8 +25,6 @@ import java.util.UUID; import java.util.Set; import java.util.HashSet; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HBaseTestingUtility; @@ -34,13 +32,15 @@ import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.testclassification.MiscTests; import org.junit.*; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Test {@link FSUtils}. */ @Category({MiscTests.class, MediumTests.class}) public class TestFSVisitor { - private static final Log LOG = LogFactory.getLog(TestFSVisitor.class); + private static final Logger LOG = LoggerFactory.getLogger(TestFSVisitor.class); private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestIdLock.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestIdLock.java index c3f934dee65..769f8ef6db3 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestIdLock.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestIdLock.java @@ -31,18 +31,18 @@ import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.testclassification.MiscTests; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({MiscTests.class, MediumTests.class}) // Medium as it creates 100 threads; seems better to run it isolated public class TestIdLock { - private static final Log LOG = LogFactory.getLog(TestIdLock.class); + private static final Logger LOG = LoggerFactory.getLogger(TestIdLock.class); private static final int NUM_IDS = 16; private static final int NUM_THREADS = 128; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestIdReadWriteLock.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestIdReadWriteLock.java index 7dd2a6322af..af89d357d84 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestIdReadWriteLock.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestIdReadWriteLock.java @@ -35,8 +35,6 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantReadWriteLock; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.testclassification.MiscTests; import 
org.apache.hadoop.hbase.util.IdReadWriteLock.ReferenceType; @@ -44,13 +42,15 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @RunWith(Parameterized.class) @Category({MiscTests.class, MediumTests.class}) // Medium as it creates 100 threads; seems better to run it isolated public class TestIdReadWriteLock { - private static final Log LOG = LogFactory.getLog(TestIdReadWriteLock.class); + private static final Logger LOG = LoggerFactory.getLogger(TestIdReadWriteLock.class); private static final int NUM_IDS = 16; private static final int NUM_THREADS = 128; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestJSONMetricUtil.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestJSONMetricUtil.java index 1135039100a..13c6df5d1eb 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestJSONMetricUtil.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestJSONMetricUtil.java @@ -34,17 +34,18 @@ import static org.junit.Assert.assertNull; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JsonNode; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; + import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({MiscTests.class, SmallTests.class}) public class TestJSONMetricUtil { - private static final Log LOG = LogFactory.getLog(TestJSONMetricUtil.class); + private static final Logger LOG = LoggerFactory.getLogger(TestJSONMetricUtil.class); @Test public void testBuildHashtable() { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadSequential.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadSequential.java index be4e850c05c..adc7567f2d0 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadSequential.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadSequential.java @@ -24,8 +24,6 @@ import java.util.Collection; import java.util.EnumSet; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseTestingUtility; @@ -47,6 +45,8 @@ import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import org.junit.runners.Parameterized.Parameters; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A write/read/verify load test on a mini HBase cluster. 
Tests reading @@ -56,7 +56,7 @@ import org.junit.runners.Parameterized.Parameters; @RunWith(Parameterized.class) public class TestMiniClusterLoadSequential { - private static final Log LOG = LogFactory.getLog( + private static final Logger LOG = LoggerFactory.getLogger( TestMiniClusterLoadSequential.class); protected static final TableName TABLE = diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionMover.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionMover.java index 68196a02407..7ab0dd38cfa 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionMover.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionMover.java @@ -24,8 +24,6 @@ import static org.junit.Assert.assertEquals; import java.io.File; import java.io.FileWriter; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.MiniHBaseCluster; @@ -41,6 +39,8 @@ import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Tests for Region Mover Load/Unload functionality with and without ack mode and also to test @@ -49,7 +49,7 @@ import org.junit.experimental.categories.Category; @Category(MediumTests.class) public class TestRegionMover { - final Log LOG = LogFactory.getLog(getClass()); + final Logger LOG = LoggerFactory.getLogger(getClass()); protected final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); @BeforeClass diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSplitCalculator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSplitCalculator.java index 02578ff2642..88962f0174d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSplitCalculator.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSplitCalculator.java @@ -28,8 +28,6 @@ import java.util.List; import java.util.SortedSet; import java.util.UUID; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.junit.Test; @@ -38,10 +36,12 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.ComparisonChain; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Multimap; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({MiscTests.class, SmallTests.class}) public class TestRegionSplitCalculator { - private static final Log LOG = LogFactory.getLog(TestRegionSplitCalculator.class); + private static final Logger LOG = LoggerFactory.getLogger(TestRegionSplitCalculator.class); /** * This is range uses a user specified start and end keys. 
It also has an diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSplitter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSplitter.java index 9643443471b..40564ee07fa 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSplitter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSplitter.java @@ -28,8 +28,6 @@ import java.util.ArrayList; import java.util.List; import org.apache.commons.lang3.ArrayUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HRegionInfo; @@ -49,6 +47,8 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Tests for {@link RegionSplitter}, which can create a pre-split table or do a @@ -56,7 +56,7 @@ import org.junit.rules.TestName; */ @Category({MiscTests.class, MediumTests.class}) public class TestRegionSplitter { - private final static Log LOG = LogFactory.getLog(TestRegionSplitter.class); + private final static Logger LOG = LoggerFactory.getLogger(TestRegionSplitter.class); private final static HBaseTestingUtility UTIL = new HBaseTestingUtility(); private final static String CF_NAME = "SPLIT_TEST_CF"; private final static byte xFF = (byte) 0xff; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRootPath.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRootPath.java index 1ecfa2b0772..61d3aea43d5 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRootPath.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRootPath.java @@ -23,20 +23,20 @@ import junit.framework.TestCase; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; - import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hbase.log.HBaseMarkers; import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Test requirement that root directory must be a URI */ @Category({MiscTests.class, SmallTests.class}) public class TestRootPath extends TestCase { - private static final Log LOG = LogFactory.getLog(TestRootPath.class); + private static final Logger LOG = LoggerFactory.getLogger(TestRootPath.class); /** The test */ public void testRootPath() { @@ -44,14 +44,14 @@ public class TestRootPath extends TestCase { // Try good path FSUtils.validateRootPath(new Path("file:///tmp/hbase/hbase")); } catch (IOException e) { - LOG.fatal("Unexpected exception checking valid path:", e); + LOG.error(HBaseMarkers.FATAL, "Unexpected exception checking valid path:", e); fail(); } try { // Try good path FSUtils.validateRootPath(new Path("hdfs://a:9000/hbase")); } catch (IOException e) { - LOG.fatal("Unexpected exception checking valid path:", e); + LOG.error(HBaseMarkers.FATAL, "Unexpected exception checking valid path:", e); fail(); } try { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/hbck/OfflineMetaRebuildTestCore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/hbck/OfflineMetaRebuildTestCore.java index e6070ddd0ff..1085ce4a3ff 100644 --- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/hbck/OfflineMetaRebuildTestCore.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/hbck/OfflineMetaRebuildTestCore.java @@ -23,8 +23,6 @@ import java.io.IOException; import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; @@ -58,6 +56,8 @@ import org.apache.zookeeper.KeeperException; import org.junit.After; import org.junit.Before; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This testing base class creates a minicluster and testing table table @@ -74,8 +74,8 @@ import org.junit.experimental.categories.Category; */ @Category({MiscTests.class, LargeTests.class}) public class OfflineMetaRebuildTestCore { - private final static Log LOG = LogFactory - .getLog(OfflineMetaRebuildTestCore.class); + private final static Logger LOG = LoggerFactory + .getLogger(OfflineMetaRebuildTestCore.class); protected HBaseTestingUtility TEST_UTIL; protected Configuration conf; private final static byte[] FAM = Bytes.toBytes("fam"); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/test/LoadTestDataGeneratorWithACL.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/test/LoadTestDataGeneratorWithACL.java index 58ba0470436..27eeb594081 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/test/LoadTestDataGeneratorWithACL.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/test/LoadTestDataGeneratorWithACL.java @@ -18,9 +18,9 @@ package org.apache.hadoop.hbase.util.test; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.Mutation; import org.apache.hadoop.hbase.security.access.Permission; @@ -28,7 +28,7 @@ import org.apache.hadoop.hbase.util.MultiThreadedAction.DefaultDataGenerator; @InterfaceAudience.Private public class LoadTestDataGeneratorWithACL extends DefaultDataGenerator { - private static final Log LOG = LogFactory.getLog(LoadTestDataGeneratorWithACL.class); + private static final Logger LOG = LoggerFactory.getLogger(LoadTestDataGeneratorWithACL.class); private String[] userNames = null; private static final String COMMA = ","; private int specialPermCellInsertionFactor = 100; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/IOTestProvider.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/IOTestProvider.java index f578c11861b..bfd18cf5f4a 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/IOTestProvider.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/IOTestProvider.java @@ -27,13 +27,13 @@ import java.util.ArrayList; import java.util.Collection; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HConstants; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; // imports for things that haven't moved from 
regionserver.wal yet. import org.apache.hadoop.hbase.regionserver.wal.FSHLog; import org.apache.hadoop.hbase.regionserver.wal.ProtobufLogWriter; @@ -70,7 +70,7 @@ import org.apache.hadoop.hbase.wal.WAL.Entry; */ @InterfaceAudience.Private public class IOTestProvider implements WALProvider { - private static final Log LOG = LogFactory.getLog(IOTestProvider.class); + private static final Logger LOG = LoggerFactory.getLogger(IOTestProvider.class); private static final String ALLOWED_OPERATIONS = "hbase.wal.iotestprovider.operations"; private enum AllowedOperations { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestBoundedRegionGroupingStrategy.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestBoundedRegionGroupingStrategy.java index 73725bb4e3a..be65ba3b501 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestBoundedRegionGroupingStrategy.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestBoundedRegionGroupingStrategy.java @@ -30,8 +30,6 @@ import java.util.HashSet; import java.util.Random; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.Path; @@ -51,11 +49,14 @@ import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import org.junit.runners.Parameterized.Parameter; import org.junit.runners.Parameterized.Parameters; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @RunWith(Parameterized.class) @Category({ RegionServerTests.class, LargeTests.class }) public class TestBoundedRegionGroupingStrategy { - private static final Log LOG = LogFactory.getLog(TestBoundedRegionGroupingStrategy.class); + private static final Logger LOG = + LoggerFactory.getLogger(TestBoundedRegionGroupingStrategy.class); private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestFSHLogProvider.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestFSHLogProvider.java index d3d4d53e288..da2965e5ce8 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestFSHLogProvider.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestFSHLogProvider.java @@ -30,8 +30,6 @@ import java.util.Random; import java.util.Set; import java.util.TreeMap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -58,10 +56,12 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({RegionServerTests.class, MediumTests.class}) public class TestFSHLogProvider { - private static final Log LOG = LogFactory.getLog(TestFSHLogProvider.class); + private static final Logger LOG = LoggerFactory.getLogger(TestFSHLogProvider.class); protected static Configuration conf; protected static FileSystem fs; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestSecureWAL.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestSecureWAL.java index ac53ae9554b..0814fcb6c0d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestSecureWAL.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestSecureWAL.java @@ 
-27,8 +27,6 @@ import java.util.NavigableMap; import java.util.TreeMap; import org.apache.commons.io.IOUtils; -import org.apache.commons.logging.LogFactory; -import org.apache.commons.logging.impl.Log4JLogger; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FileSystem; @@ -50,7 +48,6 @@ import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.testclassification.RegionServerTests; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.FSUtils; -import org.apache.log4j.Level; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; @@ -67,10 +64,6 @@ import org.junit.runners.Parameterized.Parameters; @Category({ RegionServerTests.class, MediumTests.class }) public class TestSecureWAL { - static { - ((Log4JLogger)LogFactory.getLog("org.apache.hadoop.hbase.regionserver.wal")) - .getLogger().setLevel(Level.ALL); - }; static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); @Rule diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFactory.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFactory.java index 7c1af2539ee..2c19c12965a 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFactory.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFactory.java @@ -30,8 +30,7 @@ import java.net.BindException; import java.util.List; import java.util.NavigableMap; import java.util.TreeMap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; + import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; @@ -71,13 +70,15 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * WAL tests that can be reused across providers. */ @Category({RegionServerTests.class, MediumTests.class}) public class TestWALFactory { - private static final Log LOG = LogFactory.getLog(TestWALFactory.class); + private static final Logger LOG = LoggerFactory.getLogger(TestWALFactory.class); protected static Configuration conf; private static MiniDFSCluster cluster; @@ -402,7 +403,7 @@ public class TestWALFactory { // Stop the cluster. 
(ensure restart since we're sharing MiniDFSCluster) try { - DistributedFileSystem dfs = (DistributedFileSystem) cluster.getFileSystem(); + DistributedFileSystem dfs = cluster.getFileSystem(); dfs.setSafeMode(HdfsConstants.SafeModeAction.SAFEMODE_ENTER); TEST_UTIL.shutdownMiniDFSCluster(); try { @@ -410,7 +411,7 @@ public class TestWALFactory { // but still call this since it closes the LogSyncer thread first wal.shutdown(); } catch (IOException e) { - LOG.info(e); + LOG.info(e.toString(), e); } fs.close(); // closing FS last so DFSOutputStream can't call close LOG.info("STOPPED first instance of the cluster"); @@ -445,7 +446,7 @@ public class TestWALFactory { try { Thread.sleep(1000); } catch (InterruptedException e) { - LOG.info(e); + LOG.info(e.toString(), e); } // Now try recovering the log, like the HMaster would do @@ -454,6 +455,7 @@ public class TestWALFactory { class RecoverLogThread extends Thread { public Exception exception = null; + @Override public void run() { try { FSUtils.getInstance(fs, rlConf) diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALReaderOnSecureWAL.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALReaderOnSecureWAL.java index 32253106cce..3672f9b146d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALReaderOnSecureWAL.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALReaderOnSecureWAL.java @@ -27,8 +27,6 @@ import java.util.NavigableMap; import java.util.TreeMap; import org.apache.commons.io.IOUtils; -import org.apache.commons.logging.LogFactory; -import org.apache.commons.logging.impl.Log4JLogger; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FileStatus; @@ -58,7 +56,6 @@ import org.apache.hadoop.hbase.testclassification.RegionServerTests; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.FSUtils; import org.apache.hadoop.hbase.zookeeper.ZKSplitLog; -import org.apache.log4j.Level; import org.junit.BeforeClass; import org.junit.Rule; import org.junit.Test; @@ -70,10 +67,6 @@ import org.junit.rules.TestName; */ @Category({RegionServerTests.class, MediumTests.class}) public class TestWALReaderOnSecureWAL { - static { - ((Log4JLogger)LogFactory.getLog("org.apache.hadoop.hbase.regionserver.wal")) - .getLogger().setLevel(Level.ALL); - }; static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); final byte[] value = Bytes.toBytes("Test value"); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALRootDir.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALRootDir.java index cd8bbe470f8..c71fb4b36aa 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALRootDir.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALRootDir.java @@ -17,8 +17,6 @@ */ package org.apache.hadoop.hbase.wal; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -39,6 +37,8 @@ import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.io.IOException; import java.util.ArrayList; @@ -49,7 +49,7 @@ import static org.junit.Assert.assertEquals; @Category(MediumTests.class) public class TestWALRootDir { - private static 
final Log LOG = LogFactory.getLog(TestWALRootDir.class); + private static final Logger LOG = LoggerFactory.getLogger(TestWALRootDir.class); private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private static Configuration conf; private static FileSystem fs; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplit.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplit.java index 0fc0df1e64e..ad3c2b50c7e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplit.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplit.java @@ -39,14 +39,13 @@ import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.NavigableSet; +import java.util.Objects; import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; import java.util.stream.Collectors; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; @@ -96,6 +95,8 @@ import org.junit.rules.TestName; import org.mockito.Mockito; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Testing {@link WAL} splitting code. @@ -109,7 +110,7 @@ public class TestWALSplit { //((Log4JLogger)LeaseManager.LOG).getLogger().setLevel(Level.ALL); //((Log4JLogger)FSNamesystem.LOG).getLogger().setLevel(Level.ALL); } - private final static Log LOG = LogFactory.getLog(TestWALSplit.class); + private final static Logger LOG = LoggerFactory.getLogger(TestWALSplit.class); private static Configuration conf; private FileSystem fs; @@ -240,7 +241,7 @@ public class TestWALSplit { for (FileStatus status : fs.listStatus(WALDIR)) { ls.append("\t").append(status.toString()).append("\n"); } - LOG.debug(ls); + LOG.debug(Objects.toString(ls)); LOG.info("Splitting WALs out from under zombie. 
Expecting " + numWriters + " files."); WALSplitter.split(HBASEDIR, WALDIR, OLDLOGDIR, fs, conf2, wals); LOG.info("Finished splitting out from under zombie."); @@ -820,6 +821,7 @@ public class TestWALSplit { someOldThread.setDaemon(true); someOldThread.start(); final Thread t = new Thread("Background-thread-dumper") { + @Override public void run() { try { Threads.threadDumpingIsAlive(someOldThread); @@ -888,6 +890,7 @@ public class TestWALSplit { "Blocklist for " + OLDLOGDIR + " has changed"}; private int count = 0; + @Override public FSDataInputStream answer(InvocationOnMock invocation) throws Throwable { if (count < 3) { throw new IOException(errors[count++]); @@ -917,6 +920,7 @@ public class TestWALSplit { FileSystem spiedFs = Mockito.spy(fs); Mockito.doAnswer(new Answer() { + @Override public FSDataInputStream answer(InvocationOnMock invocation) throws Throwable { Thread.sleep(1500); // Sleep a while and wait report status invoked return (FSDataInputStream)invocation.callRealMethod(); @@ -1035,7 +1039,7 @@ public class TestWALSplit { byte region[] = new byte[] {(byte)'r', (byte) (0x30 + regionIdx)}; Entry ret = createTestEntry(TABLE_NAME, region, - Bytes.toBytes((int)(index / regions.size())), + Bytes.toBytes(index / regions.size()), FAMILY, QUALIFIER, VALUE, index); index++; return ret; @@ -1157,7 +1161,7 @@ public class TestWALSplit { try{ logSplitter.splitLogFile(logfiles[0], null); } catch (IOException e) { - LOG.info(e); + LOG.info(e.toString(), e); fail("Throws IOException when spliting " + "log, it is most likely because writing file does not " + "exist which is caused by concurrent replayRecoveredEditsIfAny()"); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/WALPerformanceEvaluation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/WALPerformanceEvaluation.java index de23d61e88b..93e63735457 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/WALPerformanceEvaluation.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/WALPerformanceEvaluation.java @@ -31,8 +31,6 @@ import java.util.Set; import java.util.TreeMap; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.fs.FileStatus; @@ -70,6 +68,8 @@ import org.apache.htrace.core.Sampler; import org.apache.htrace.core.TraceScope; import org.apache.htrace.core.Tracer; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import com.codahale.metrics.ConsoleReporter; import com.codahale.metrics.Histogram; @@ -86,7 +86,9 @@ import com.codahale.metrics.MetricRegistry; */ @InterfaceAudience.Private public final class WALPerformanceEvaluation extends Configured implements Tool { - private static final Log LOG = LogFactory.getLog(WALPerformanceEvaluation.class.getName()); + private static final Logger LOG = + LoggerFactory.getLogger(WALPerformanceEvaluation.class); + private final MetricRegistry metrics = new MetricRegistry(); private final Meter syncMeter = metrics.meter(name(WALPerformanceEvaluation.class, "syncMeter", "syncs")); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZooKeeperACL.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZooKeeperACL.java index 346b8fbff0c..4e67b9171db 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZooKeeperACL.java +++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZooKeeperACL.java @@ -28,8 +28,6 @@ import java.util.List; import javax.security.auth.login.AppConfigurationEntry; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseTestingUtility; @@ -46,10 +44,12 @@ import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Category({ ZKTests.class, MediumTests.class }) public class TestZooKeeperACL { - private final static Log LOG = LogFactory.getLog(TestZooKeeperACL.class); + private final static Logger LOG = LoggerFactory.getLogger(TestZooKeeperACL.class); private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); diff --git a/hbase-shaded/pom.xml b/hbase-shaded/pom.xml index 8720c5a532d..db27b3a9a2c 100644 --- a/hbase-shaded/pom.xml +++ b/hbase-shaded/pom.xml @@ -59,7 +59,6 @@ org.slf4j slf4j-log4j12 - ${slf4j.version} true diff --git a/hbase-shell/pom.xml b/hbase-shell/pom.xml index c365926ea80..e33fcb5a731 100644 --- a/hbase-shell/pom.xml +++ b/hbase-shell/pom.xml @@ -212,8 +212,8 @@ - commons-logging - commons-logging + org.slf4j + slf4j-api org.jruby diff --git a/hbase-shell/src/test/rsgroup/org/apache/hadoop/hbase/client/rsgroup/TestShellRSGroups.java b/hbase-shell/src/test/rsgroup/org/apache/hadoop/hbase/client/rsgroup/TestShellRSGroups.java index 26fa81dc73d..70b215c452d 100644 --- a/hbase-shell/src/test/rsgroup/org/apache/hadoop/hbase/client/rsgroup/TestShellRSGroups.java +++ b/hbase-shell/src/test/rsgroup/org/apache/hadoop/hbase/client/rsgroup/TestShellRSGroups.java @@ -18,8 +18,6 @@ */ package org.apache.hadoop.hbase.client.rsgroup; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.coprocessor.CoprocessorHost; @@ -35,6 +33,8 @@ import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.io.IOException; import java.util.ArrayList; @@ -44,7 +44,7 @@ import java.util.List; //Since we need to use a different balancer and run more than 1 RS @Category({ClientTests.class, LargeTests.class}) public class TestShellRSGroups { - final Log LOG = LogFactory.getLog(getClass()); + final Logger LOG = LoggerFactory.getLogger(getClass()); private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private final static ScriptingContainer jruby = new ScriptingContainer(); private static String basePath; diff --git a/hbase-testing-util/pom.xml b/hbase-testing-util/pom.xml index 07eb286a8fa..83a949d6f2b 100644 --- a/hbase-testing-util/pom.xml +++ b/hbase-testing-util/pom.xml @@ -129,7 +129,6 @@ org.slf4j slf4j-log4j12 - ${slf4j.version} diff --git a/hbase-thrift/pom.xml b/hbase-thrift/pom.xml index ad18dbb39e3..4f9d37416b7 100644 --- a/hbase-thrift/pom.xml +++ b/hbase-thrift/pom.xml @@ -210,8 +210,8 @@ hbase-shaded-miscellaneous - commons-logging - commons-logging + org.slf4j + slf4j-api commons-cli diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/CallQueue.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/CallQueue.java 
index da09825c1a4..82c9b5d607d 100644 --- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/CallQueue.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/CallQueue.java @@ -25,9 +25,9 @@ import java.util.List; import java.util.concurrent.BlockingQueue; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A BlockingQueue reports waiting time in queue and queue length to @@ -35,7 +35,7 @@ import org.apache.yetus.audience.InterfaceAudience; */ @InterfaceAudience.Private public class CallQueue implements BlockingQueue { - private static final Log LOG = LogFactory.getLog(CallQueue.class); + private static final Logger LOG = LoggerFactory.getLogger(CallQueue.class); private final BlockingQueue underlyingQueue; private final ThriftMetrics metrics; diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/HThreadedSelectorServerArgs.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/HThreadedSelectorServerArgs.java index 58146ca164b..ad1384c6053 100644 --- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/HThreadedSelectorServerArgs.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/HThreadedSelectorServerArgs.java @@ -21,19 +21,19 @@ package org.apache.hadoop.hbase.thrift; import java.util.Locale; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.thrift.server.TThreadedSelectorServer; import org.apache.thrift.transport.TNonblockingServerTransport; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A TThreadedSelectorServer.Args that reads hadoop configuration */ @InterfaceAudience.Private public class HThreadedSelectorServerArgs extends TThreadedSelectorServer.Args { - private static final Log LOG = LogFactory.getLog(TThreadedSelectorServer.class); + private static final Logger LOG = LoggerFactory.getLogger(TThreadedSelectorServer.class); /** * Number of selector threads for reading and writing socket diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/HbaseHandlerMetricsProxy.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/HbaseHandlerMetricsProxy.java index 2fc65466925..46e394394ae 100644 --- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/HbaseHandlerMetricsProxy.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/HbaseHandlerMetricsProxy.java @@ -23,11 +23,11 @@ import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.lang.reflect.Proxy; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.thrift.generated.Hbase; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Converts a Hbase.Iface using InvocationHandler so that it reports process @@ -36,7 +36,7 @@ import org.apache.yetus.audience.InterfaceAudience; @InterfaceAudience.Private public class HbaseHandlerMetricsProxy implements InvocationHandler { - private static final Log LOG = LogFactory.getLog( + private static final Logger LOG = LoggerFactory.getLogger( HbaseHandlerMetricsProxy.class); private final Hbase.Iface handler; diff --git 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/IncrementCoalescer.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/IncrementCoalescer.java index 3f0530a3386..60a8b7faac4 100644 --- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/IncrementCoalescer.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/IncrementCoalescer.java @@ -32,8 +32,6 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.LongAdder; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.thrift.ThriftServerRunner.HBaseHandler; @@ -42,6 +40,8 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Threads; import org.apache.hadoop.metrics2.util.MBeans; import org.apache.thrift.TException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This class will coalesce increments from a thift server if @@ -161,7 +161,7 @@ public class IncrementCoalescer implements IncrementCoalescerMBean { private int maxQueueSize = 500000; private static final int CORE_POOL_SIZE = 1; - private static final Log LOG = LogFactory.getLog(FullyQualifiedRow.class); + private static final Logger LOG = LoggerFactory.getLogger(FullyQualifiedRow.class); @SuppressWarnings("deprecation") public IncrementCoalescer(HBaseHandler hand) { diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/TBoundedThreadPoolServer.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/TBoundedThreadPoolServer.java index 56736ca5522..d4cb8d62b36 100644 --- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/TBoundedThreadPoolServer.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/TBoundedThreadPoolServer.java @@ -25,8 +25,6 @@ import java.util.concurrent.SynchronousQueue; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.util.Threads; import org.apache.thrift.TException; @@ -39,7 +37,8 @@ import org.apache.thrift.transport.TSocket; import org.apache.thrift.transport.TTransport; import org.apache.thrift.transport.TTransportException; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder; /** @@ -101,7 +100,7 @@ public class TBoundedThreadPoolServer extends TServer { */ public static final int TIME_TO_WAIT_AFTER_SHUTDOWN_MS = 5000; - private static final Log LOG = LogFactory.getLog( + private static final Logger LOG = LoggerFactory.getLogger( TBoundedThreadPoolServer.class.getName()); private final CallQueue callQueue; diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftHttpServlet.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftHttpServlet.java index 784f9dca6f1..4f55a01a08f 100644 --- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftHttpServlet.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftHttpServlet.java @@ -25,8 +25,6 @@ import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; -import 
org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.security.SecurityUtil; import org.apache.hadoop.hbase.util.Base64; @@ -43,6 +41,8 @@ import org.ietf.jgss.GSSException; import org.ietf.jgss.GSSManager; import org.ietf.jgss.GSSName; import org.ietf.jgss.Oid; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Thrift Http Servlet is used for performing Kerberos authentication if security is enabled and @@ -51,7 +51,7 @@ import org.ietf.jgss.Oid; @InterfaceAudience.Private public class ThriftHttpServlet extends TServlet { private static final long serialVersionUID = 1L; - private static final Log LOG = LogFactory.getLog(ThriftHttpServlet.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(ThriftHttpServlet.class.getName()); private transient final UserGroupInformation realUser; private transient final Configuration conf; private final boolean securityEnabled; diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServer.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServer.java index 8bf6409a954..cd1993de6dc 100644 --- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServer.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServer.java @@ -26,8 +26,6 @@ import org.apache.commons.cli.CommandLineParser; import org.apache.commons.cli.HelpFormatter; import org.apache.commons.cli.Options; import org.apache.commons.cli.PosixParser; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseInterfaceAudience; @@ -36,6 +34,8 @@ import org.apache.hadoop.hbase.thrift.ThriftServerRunner.ImplType; import org.apache.hadoop.hbase.util.VersionInfo; import org.apache.hadoop.util.Shell.ExitCodeException; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * ThriftServer- this class starts up a Thrift server which implements the @@ -45,7 +45,7 @@ import org.apache.yetus.audience.InterfaceAudience; @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS) public class ThriftServer { - private static final Log LOG = LogFactory.getLog(ThriftServer.class); + private static final Logger LOG = LoggerFactory.getLogger(ThriftServer.class); private static final String MIN_WORKERS_OPTION = "minWorkers"; private static final String MAX_WORKERS_OPTION = "workers"; diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java index 27850ef0215..ba278474c76 100644 --- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java @@ -48,8 +48,6 @@ import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.Option; import org.apache.commons.cli.OptionGroup; import org.apache.commons.lang3.ArrayUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Cell.DataType; import org.apache.hadoop.hbase.CellBuilder; @@ -84,6 +82,7 @@ import org.apache.hadoop.hbase.filter.Filter; import org.apache.hadoop.hbase.filter.ParseFilter; import 
org.apache.hadoop.hbase.filter.PrefixFilter; import org.apache.hadoop.hbase.filter.WhileMatchFilter; +import org.apache.hadoop.hbase.log.HBaseMarkers; import org.apache.hadoop.hbase.security.SaslUtil; import org.apache.hadoop.hbase.security.SaslUtil.QualityOfProtection; import org.apache.hadoop.hbase.security.SecurityUtil; @@ -139,7 +138,8 @@ import org.eclipse.jetty.servlet.ServletContextHandler; import org.eclipse.jetty.servlet.ServletHolder; import org.eclipse.jetty.util.ssl.SslContextFactory; import org.eclipse.jetty.util.thread.QueuedThreadPool; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.base.Joiner; import org.apache.hadoop.hbase.shaded.com.google.common.base.Throwables; import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder; @@ -151,7 +151,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFa @InterfaceAudience.Private public class ThriftServerRunner implements Runnable { - private static final Log LOG = LogFactory.getLog(ThriftServerRunner.class); + private static final Logger LOG = LoggerFactory.getLogger(ThriftServerRunner.class); private static final int DEFAULT_HTTP_MAX_HEADER_SIZE = 64 * 1024; // 64k @@ -386,7 +386,7 @@ public class ThriftServerRunner implements Runnable { tserver.serve(); } } catch (Exception e) { - LOG.fatal("Cannot run ThriftServer", e); + LOG.error(HBaseMarkers.FATAL, "Cannot run ThriftServer", e); // Crash the process if the ThriftServer is not running System.exit(-1); } @@ -714,7 +714,7 @@ public class ThriftServerRunner implements Runnable { */ public static class HBaseHandler implements Hbase.Iface { protected Configuration conf; - protected static final Log LOG = LogFactory.getLog(HBaseHandler.class); + protected static final Logger LOG = LoggerFactory.getLogger(HBaseHandler.class); // nextScannerId and scannerMap are used to manage scanner state protected int nextScannerId = 0; diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.java index be74f0c4510..3773fc661eb 100644 --- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.java @@ -46,8 +46,6 @@ import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicInteger; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.HRegionLocation; @@ -73,6 +71,8 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.ConnectionCache; import org.apache.thrift.TException; import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This class is a glue object that connects Thrift RPC calls to the HBase client API primarily @@ -83,7 +83,7 @@ import org.apache.yetus.audience.InterfaceAudience; public class ThriftHBaseServiceHandler implements THBaseService.Iface { // TODO: Size of pool configuraple - private static final Log LOG = LogFactory.getLog(ThriftHBaseServiceHandler.class); + private static final Logger LOG = LoggerFactory.getLogger(ThriftHBaseServiceHandler.class); // nextScannerId and scannerMap are used to 
manage scanner state // TODO: Cleanup thread for Scanners, Scanner id wrap diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftServer.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftServer.java index 4f75dbcdad6..82bce156446 100644 --- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftServer.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftServer.java @@ -44,8 +44,6 @@ import org.apache.commons.cli.OptionGroup; import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; import org.apache.commons.cli.PosixParser; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.hbase.HBaseConfiguration; @@ -86,7 +84,8 @@ import org.apache.thrift.transport.TServerTransport; import org.apache.thrift.transport.TTransportException; import org.apache.thrift.transport.TTransportFactory; import org.apache.yetus.audience.InterfaceAudience; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder; /** @@ -96,7 +95,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFa @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS) @SuppressWarnings({ "rawtypes", "unchecked" }) public class ThriftServer extends Configured implements Tool { - private static final Log log = LogFactory.getLog(ThriftServer.class); + private static final Logger log = LoggerFactory.getLogger(ThriftServer.class); /** * Thrift quality of protection configuration key. Valid values can be: diff --git a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestCallQueue.java b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestCallQueue.java index b5d0f04ea1d..a13774db078 100644 --- a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestCallQueue.java +++ b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestCallQueue.java @@ -22,8 +22,6 @@ import java.util.ArrayList; import java.util.Collection; import java.util.concurrent.LinkedBlockingQueue; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.CompatibilitySingletonFactory; import org.apache.hadoop.hbase.HBaseTestingUtility; @@ -35,6 +33,8 @@ import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import org.junit.runners.Parameterized.Parameters; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Unit testing for CallQueue, a part of the @@ -44,7 +44,7 @@ import org.junit.runners.Parameterized.Parameters; @RunWith(Parameterized.class) public class TestCallQueue { - private static final Log LOG = LogFactory.getLog(TestCallQueue.class); + private static final Logger LOG = LoggerFactory.getLogger(TestCallQueue.class); private static final HBaseTestingUtility UTIL = new HBaseTestingUtility(); private static final MetricsAssertHelper metricsHelper = diff --git a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftHttpServer.java b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftHttpServer.java index c5fff4ee2ca..aeb24f79b21 100644 --- a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftHttpServer.java +++ 
b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftHttpServer.java @@ -23,8 +23,6 @@ import static org.junit.Assert.assertFalse; import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.testclassification.ClientTests; @@ -43,7 +41,8 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.ExpectedException; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.base.Joiner; /** @@ -54,8 +53,8 @@ import org.apache.hadoop.hbase.shaded.com.google.common.base.Joiner; public class TestThriftHttpServer { - private static final Log LOG = - LogFactory.getLog(TestThriftHttpServer.class); + private static final Logger LOG = + LoggerFactory.getLogger(TestThriftHttpServer.class); private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); diff --git a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftServer.java b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftServer.java index d0052e50be5..a7ae8537d93 100644 --- a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftServer.java +++ b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftServer.java @@ -32,8 +32,6 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.CompatibilityFactory; import org.apache.hadoop.hbase.HBaseTestingUtility; @@ -69,6 +67,8 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Unit testing for ThriftServerRunner.HBaseHandler, a part of the @@ -77,7 +77,7 @@ import org.junit.rules.TestName; @Category({ClientTests.class, LargeTests.class}) public class TestThriftServer { private static final HBaseTestingUtility UTIL = new HBaseTestingUtility(); - private static final Log LOG = LogFactory.getLog(TestThriftServer.class); + private static final Logger LOG = LoggerFactory.getLogger(TestThriftServer.class); private static final MetricsAssertHelper metricsHelper = CompatibilityFactory .getInstance(MetricsAssertHelper.class); protected static final int MAXVERSIONS = 3; diff --git a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftServerCmdLine.java b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftServerCmdLine.java index 4d52576cced..818b162c476 100644 --- a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftServerCmdLine.java +++ b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftServerCmdLine.java @@ -26,8 +26,6 @@ import java.util.ArrayList; import java.util.Collection; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.testclassification.ClientTests; import org.apache.hadoop.hbase.testclassification.LargeTests; @@ -50,7 +48,8 @@ import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import 
org.junit.runners.Parameterized.Parameters; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.base.Joiner; /** @@ -61,8 +60,8 @@ import org.apache.hadoop.hbase.shaded.com.google.common.base.Joiner; @RunWith(Parameterized.class) public class TestThriftServerCmdLine { - private static final Log LOG = - LogFactory.getLog(TestThriftServerCmdLine.class); + private static final Logger LOG = + LoggerFactory.getLogger(TestThriftServerCmdLine.class); private final ImplType implType; private boolean specifyFramed; diff --git a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift2/TestThriftHBaseServiceHandler.java b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift2/TestThriftHBaseServiceHandler.java index e2e75804627..c1b523ac4ec 100644 --- a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift2/TestThriftHBaseServiceHandler.java +++ b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift2/TestThriftHBaseServiceHandler.java @@ -47,8 +47,6 @@ import java.util.Optional; import java.util.concurrent.TimeUnit; import org.apache.commons.collections4.CollectionUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CompatibilityFactory; @@ -105,7 +103,8 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; /** @@ -115,7 +114,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; @Category({ClientTests.class, MediumTests.class}) public class TestThriftHBaseServiceHandler { - private static final Log LOG = LogFactory.getLog(TestThriftHBaseServiceHandler.class); + private static final Logger LOG = LoggerFactory.getLogger(TestThriftHBaseServiceHandler.class); private static final HBaseTestingUtility UTIL = new HBaseTestingUtility(); // Static names for tables, columns, rows, and values @@ -1563,7 +1562,7 @@ public class TestThriftHBaseServiceHandler { } public static class DelayingRegionObserver implements RegionCoprocessor, RegionObserver { - private static final Log LOG = LogFactory.getLog(DelayingRegionObserver.class); + private static final Logger LOG = LoggerFactory.getLogger(DelayingRegionObserver.class); // sleep time in msec private long delayMillis; diff --git a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift2/TestThriftHBaseServiceHandlerWithLabels.java b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift2/TestThriftHBaseServiceHandlerWithLabels.java index 3e459a68665..89a48bd4006 100644 --- a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift2/TestThriftHBaseServiceHandlerWithLabels.java +++ b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift2/TestThriftHBaseServiceHandlerWithLabels.java @@ -32,8 +32,6 @@ import java.util.Collections; import java.util.Comparator; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HColumnDescriptor; @@ -72,12 +70,14 @@ import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import 
org.slf4j.LoggerFactory; @Category({ClientTests.class, MediumTests.class}) public class TestThriftHBaseServiceHandlerWithLabels { - private static final Log LOG = LogFactory - .getLog(TestThriftHBaseServiceHandlerWithLabels.class); + private static final Logger LOG = LoggerFactory + .getLogger(TestThriftHBaseServiceHandlerWithLabels.class); private static final HBaseTestingUtility UTIL = new HBaseTestingUtility(); // Static names for tables, columns, rows, and values diff --git a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift2/TestThriftHBaseServiceHandlerWithReadOnly.java b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift2/TestThriftHBaseServiceHandlerWithReadOnly.java index dfd244ba724..50f7a4b8f83 100644 --- a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift2/TestThriftHBaseServiceHandlerWithReadOnly.java +++ b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift2/TestThriftHBaseServiceHandlerWithReadOnly.java @@ -30,8 +30,6 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.HBaseTestingUtility; @@ -64,8 +62,6 @@ import org.junit.experimental.categories.Category; @Category({ClientTests.class, MediumTests.class}) public class TestThriftHBaseServiceHandlerWithReadOnly { - - private static final Log LOG = LogFactory.getLog(TestThriftHBaseServiceHandlerWithReadOnly.class); private static final HBaseTestingUtility UTIL = new HBaseTestingUtility(); // Static names for tables, columns, rows, and values diff --git a/hbase-zookeeper/pom.xml b/hbase-zookeeper/pom.xml index ad2e481bc1e..9659b66cb10 100644 --- a/hbase-zookeeper/pom.xml +++ b/hbase-zookeeper/pom.xml @@ -213,8 +213,8 @@ commons-lang3 - commons-logging - commons-logging + org.slf4j + slf4j-api log4j diff --git a/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/ClusterStatusTracker.java b/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/ClusterStatusTracker.java index 4e9b34c576a..730c099c65f 100644 --- a/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/ClusterStatusTracker.java +++ b/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/ClusterStatusTracker.java @@ -18,11 +18,11 @@ */ package org.apache.hadoop.hbase.zookeeper; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Abortable; import org.apache.yetus.audience.InterfaceAudience; import org.apache.zookeeper.KeeperException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos; @@ -36,7 +36,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos; */ @InterfaceAudience.Private public class ClusterStatusTracker extends ZKNodeTracker { - private static final Log LOG = LogFactory.getLog(ClusterStatusTracker.class); + private static final Logger LOG = LoggerFactory.getLogger(ClusterStatusTracker.class); /** * Creates a cluster status tracker. 
diff --git a/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/DeletionListener.java b/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/DeletionListener.java index 7c028912066..ed8751b7a9e 100644 --- a/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/DeletionListener.java +++ b/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/DeletionListener.java @@ -21,10 +21,10 @@ package org.apache.hadoop.hbase.zookeeper; import java.util.concurrent.CountDownLatch; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.yetus.audience.InterfaceAudience; import org.apache.zookeeper.KeeperException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A ZooKeeper watcher meant to detect deletions of ZNodes. @@ -32,7 +32,7 @@ import org.apache.zookeeper.KeeperException; @InterfaceAudience.Private public class DeletionListener extends ZKListener { - private static final Log LOG = LogFactory.getLog(DeletionListener.class); + private static final Logger LOG = LoggerFactory.getLogger(DeletionListener.class); private final String pathToWatch; private final CountDownLatch deletedLatch; diff --git a/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/LoadBalancerTracker.java b/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/LoadBalancerTracker.java index cddce3319fa..d3085b778f6 100644 --- a/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/LoadBalancerTracker.java +++ b/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/LoadBalancerTracker.java @@ -19,13 +19,13 @@ package org.apache.hadoop.hbase.zookeeper; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Abortable; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.util.Bytes; import org.apache.yetus.audience.InterfaceAudience; import org.apache.zookeeper.KeeperException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.generated.LoadBalancerProtos; @@ -35,7 +35,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.LoadBalancerProtos; */ @InterfaceAudience.Private public class LoadBalancerTracker extends ZKNodeTracker { - private static final Log LOG = LogFactory.getLog(LoadBalancerTracker.class); + private static final Logger LOG = LoggerFactory.getLogger(LoadBalancerTracker.class); public LoadBalancerTracker(ZKWatcher watcher, Abortable abortable) { diff --git a/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.java b/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.java index 6e231886ade..3d58c411e64 100644 --- a/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.java +++ b/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.java @@ -29,8 +29,6 @@ import java.util.Collections; import java.util.List; import java.util.Locale; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.NotAllMetaRegionsOnlineException; @@ -51,7 +49,8 @@ import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.ipc.RemoteException; import 
org.apache.yetus.audience.InterfaceAudience; import org.apache.zookeeper.KeeperException; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos; @@ -77,7 +76,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.MetaReg */ @InterfaceAudience.Private public class MetaTableLocator { - private static final Log LOG = LogFactory.getLog(MetaTableLocator.class); + private static final Logger LOG = LoggerFactory.getLogger(MetaTableLocator.class); // only needed to allow non-timeout infinite waits to stop when cluster shuts down private volatile boolean stopped = false; diff --git a/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/MiniZooKeeperCluster.java b/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/MiniZooKeeperCluster.java index a4acda2c2da..516ff9a65cb 100644 --- a/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/MiniZooKeeperCluster.java +++ b/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/MiniZooKeeperCluster.java @@ -32,14 +32,14 @@ import java.util.ArrayList; import java.util.List; import java.util.Random; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HConstants; import org.apache.yetus.audience.InterfaceAudience; import org.apache.zookeeper.server.NIOServerCnxnFactory; import org.apache.zookeeper.server.ZooKeeperServer; import org.apache.zookeeper.server.persistence.FileTxnLog; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; @@ -50,7 +50,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe */ @InterfaceAudience.Public public class MiniZooKeeperCluster { - private static final Log LOG = LogFactory.getLog(MiniZooKeeperCluster.class); + private static final Logger LOG = LoggerFactory.getLogger(MiniZooKeeperCluster.class); private static final int TICK_TIME = 2000; private static final int DEFAULT_CONNECTION_TIMEOUT = 30000; diff --git a/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/RecoverableZooKeeper.java b/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/RecoverableZooKeeper.java index 4b88a5de079..4c76a5cb688 100644 --- a/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/RecoverableZooKeeper.java +++ b/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/RecoverableZooKeeper.java @@ -24,8 +24,6 @@ import java.util.ArrayList; import java.util.LinkedList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.trace.TraceUtil; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; @@ -46,6 +44,8 @@ import org.apache.zookeeper.data.ACL; import org.apache.zookeeper.data.Stat; import org.apache.zookeeper.proto.CreateRequest; import org.apache.zookeeper.proto.SetDataRequest; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A zookeeper that can handle 'recoverable' errors. 
@@ -72,7 +72,7 @@ import org.apache.zookeeper.proto.SetDataRequest; */ @InterfaceAudience.Private public class RecoverableZooKeeper { - private static final Log LOG = LogFactory.getLog(RecoverableZooKeeper.class); + private static final Logger LOG = LoggerFactory.getLogger(RecoverableZooKeeper.class); // the actual ZooKeeper client instance private ZooKeeper zk; private final RetryCounterFactory retryCounterFactory; diff --git a/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/RegionNormalizerTracker.java b/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/RegionNormalizerTracker.java index 58d405d5576..a50ce4c8c67 100644 --- a/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/RegionNormalizerTracker.java +++ b/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/RegionNormalizerTracker.java @@ -20,12 +20,12 @@ package org.apache.hadoop.hbase.zookeeper; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Abortable; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.util.Bytes; import org.apache.zookeeper.KeeperException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionNormalizerProtos; @@ -34,7 +34,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionNormalizerProtos; * Tracks region normalizer state up in ZK */ public class RegionNormalizerTracker extends ZKNodeTracker { - private static final Log LOG = LogFactory.getLog(RegionNormalizerTracker.class); + private static final Logger LOG = LoggerFactory.getLogger(RegionNormalizerTracker.class); public RegionNormalizerTracker(ZKWatcher watcher, Abortable abortable) { super(watcher, watcher.znodePaths.regionNormalizerZNode, abortable); diff --git a/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKAclReset.java b/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKAclReset.java index c0f33e588b8..50a6f5e78fe 100644 --- a/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKAclReset.java +++ b/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKAclReset.java @@ -21,8 +21,6 @@ package org.apache.hadoop.hbase.zookeeper; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.hbase.HBaseConfiguration; @@ -31,6 +29,8 @@ import org.apache.hadoop.util.ToolRunner; import org.apache.yetus.audience.InterfaceAudience; import org.apache.zookeeper.ZooDefs; import org.apache.zookeeper.ZooKeeper; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * You may add the jaas.conf option @@ -44,7 +44,7 @@ import org.apache.zookeeper.ZooKeeper; */ @InterfaceAudience.Private public class ZKAclReset extends Configured implements Tool { - private static final Log LOG = LogFactory.getLog(ZKAclReset.class); + private static final Logger LOG = LoggerFactory.getLogger(ZKAclReset.class); private static void resetAcls(final ZKWatcher zkw, final String znode, final boolean eraseAcls) throws Exception { diff --git a/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKLeaderManager.java b/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKLeaderManager.java index 
25310bd50cd..5918e687640 100644 --- a/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKLeaderManager.java +++ b/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKLeaderManager.java @@ -20,12 +20,12 @@ package org.apache.hadoop.hbase.zookeeper; import java.util.concurrent.atomic.AtomicBoolean; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Stoppable; import org.apache.hadoop.hbase.util.Bytes; import org.apache.yetus.audience.InterfaceAudience; import org.apache.zookeeper.KeeperException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Handles coordination of a single "leader" instance among many possible @@ -41,7 +41,7 @@ import org.apache.zookeeper.KeeperException; @Deprecated @InterfaceAudience.Private public class ZKLeaderManager extends ZKListener { - private static final Log LOG = LogFactory.getLog(ZKLeaderManager.class); + private static final Logger LOG = LoggerFactory.getLogger(ZKLeaderManager.class); private final Object lock = new Object(); private final AtomicBoolean leaderExists = new AtomicBoolean(); diff --git a/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKNodeTracker.java b/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKNodeTracker.java index 2dd5df5080a..f76947b2c89 100644 --- a/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKNodeTracker.java +++ b/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKNodeTracker.java @@ -18,11 +18,11 @@ */ package org.apache.hadoop.hbase.zookeeper; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Abortable; import org.apache.yetus.audience.InterfaceAudience; import org.apache.zookeeper.KeeperException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Tracks the availability and value of a single ZooKeeper node. 
@@ -36,7 +36,7 @@ import org.apache.zookeeper.KeeperException;
 @InterfaceAudience.Private
 public abstract class ZKNodeTracker extends ZKListener {
   // LOG is being used in subclasses, hence keeping it protected
-  protected static final Log LOG = LogFactory.getLog(ZKNodeTracker.class);
+  protected static final Logger LOG = LoggerFactory.getLogger(ZKNodeTracker.class);
 
   /** Path of node being tracked */
   protected final String node;
diff --git a/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKSplitLog.java b/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKSplitLog.java
index da45f9dc92e..b58c98cde6d 100644
--- a/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKSplitLog.java
+++ b/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKSplitLog.java
@@ -22,12 +22,12 @@ import java.io.UnsupportedEncodingException;
 import java.net.URLDecoder;
 import java.net.URLEncoder;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Common methods and attributes used by SplitLogManager and SplitLogWorker running distributed
@@ -35,7 +35,7 @@ import org.apache.yetus.audience.InterfaceAudience;
  */
 @InterfaceAudience.Private
 public final class ZKSplitLog {
-  private static final Log LOG = LogFactory.getLog(ZKSplitLog.class);
+  private static final Logger LOG = LoggerFactory.getLogger(ZKSplitLog.class);
 
   private ZKSplitLog() {
   }
diff --git a/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java b/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java
index a4edbb3507e..3ef248d093a 100644
--- a/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java
+++ b/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java
@@ -39,8 +39,6 @@ import javax.security.auth.login.AppConfigurationEntry;
 import javax.security.auth.login.AppConfigurationEntry.LoginModuleControlFlag;
 
 import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.AuthUtil;
 import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -73,6 +71,8 @@ import org.apache.zookeeper.proto.CreateRequest;
 import org.apache.zookeeper.proto.DeleteRequest;
 import org.apache.zookeeper.proto.SetDataRequest;
 import org.apache.zookeeper.server.ZooKeeperSaslServer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
@@ -89,7 +89,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos;
  */
 @InterfaceAudience.Private
 public final class ZKUtil {
-  private static final Log LOG = LogFactory.getLog(ZKUtil.class);
+  private static final Logger LOG = LoggerFactory.getLogger(ZKUtil.class);
 
   private static int zkDumpConnectionTimeOut;
 
diff --git a/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKWatcher.java b/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKWatcher.java
index 5d48117bf1b..b0c6a6f0c3d 100644
--- a/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKWatcher.java
+++ b/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKWatcher.java
@@ -27,8 +27,6 @@ import java.util.concurrent.CountDownLatch;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Abortable;
 import org.apache.hadoop.hbase.AuthUtil;
@@ -44,6 +42,8 @@ import org.apache.zookeeper.ZooDefs.Perms;
 import org.apache.zookeeper.data.ACL;
 import org.apache.zookeeper.data.Id;
 import org.apache.zookeeper.data.Stat;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Acts as the single ZooKeeper Watcher. One instance of this is instantiated
@@ -58,7 +58,7 @@ import org.apache.zookeeper.data.Stat;
  */
 @InterfaceAudience.Private
 public class ZKWatcher implements Watcher, Abortable, Closeable {
-  private static final Log LOG = LogFactory.getLog(ZKWatcher.class);
+  private static final Logger LOG = LoggerFactory.getLogger(ZKWatcher.class);
 
   // Identifier for this watcher (for logging only). It is made of the prefix
   // passed on construction and the zookeeper sessionid.
diff --git a/hbase-zookeeper/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKLeaderManager.java b/hbase-zookeeper/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKLeaderManager.java
index fe282f52194..95ffbb437c9 100644
--- a/hbase-zookeeper/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKLeaderManager.java
+++ b/hbase-zookeeper/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKLeaderManager.java
@@ -25,13 +25,12 @@ import static org.junit.Assert.fail;
 
 import java.util.concurrent.atomic.AtomicBoolean;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Abortable;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseZKTestingUtility;
 import org.apache.hadoop.hbase.Stoppable;
+import org.apache.hadoop.hbase.log.HBaseMarkers;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.testclassification.ZKTests;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -39,10 +38,12 @@ import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 @Category({ ZKTests.class, MediumTests.class })
 public class TestZKLeaderManager {
-  private static final Log LOG = LogFactory.getLog(TestZKLeaderManager.class);
+  private static final Logger LOG = LoggerFactory.getLogger(TestZKLeaderManager.class);
 
   private static final String LEADER_ZNODE = "/test/" + TestZKLeaderManager.class.getSimpleName();
@@ -53,7 +54,7 @@ public class TestZKLeaderManager {
     @Override
     public void abort(String why, Throwable e) {
       aborted = true;
-      LOG.fatal("Aborting during test: "+why, e);
+      LOG.error(HBaseMarkers.FATAL, "Aborting during test: "+why, e);
       fail("Aborted during test: " + why);
     }
 
@@ -91,6 +92,7 @@ public class TestZKLeaderManager {
       return watcher;
     }
 
+    @Override
     public void run() {
       while (!stopped) {
         zkLeader.start();
diff --git a/hbase-zookeeper/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKMulti.java b/hbase-zookeeper/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKMulti.java
index 3cc3815894b..e6fe8108000 100644
--- a/hbase-zookeeper/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKMulti.java
+++ b/hbase-zookeeper/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKMulti.java
@@ -26,8 +26,6 @@ import java.util.ArrayList;
 import java.util.LinkedList;
 import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Abortable;
 import org.apache.hadoop.hbase.HBaseZKTestingUtility;
@@ -43,13 +41,15 @@ import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Test ZooKeeper multi-update functionality
 */
 @Category({ ZKTests.class, MediumTests.class })
 public class TestZKMulti {
-  private static final Log LOG = LogFactory.getLog(TestZKMulti.class);
+  private static final Logger LOG = LoggerFactory.getLogger(TestZKMulti.class);
 
   private final static HBaseZKTestingUtility TEST_UTIL = new HBaseZKTestingUtility();
   private static ZKWatcher zkw = null;
diff --git a/hbase-zookeeper/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKNodeTracker.java b/hbase-zookeeper/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKNodeTracker.java
index f8aa7c3c660..3778ca0119c 100644
--- a/hbase-zookeeper/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKNodeTracker.java
+++ b/hbase-zookeeper/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKNodeTracker.java
@@ -28,8 +28,6 @@ import java.io.IOException;
 import java.util.concurrent.Semaphore;
 import java.util.concurrent.ThreadLocalRandom;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.Abortable;
 import org.apache.hadoop.hbase.HBaseZKTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
@@ -45,10 +43,12 @@ import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 @Category({ ZKTests.class, MediumTests.class })
 public class TestZKNodeTracker {
-  private static final Log LOG = LogFactory.getLog(TestZKNodeTracker.class);
+  private static final Logger LOG = LoggerFactory.getLogger(TestZKNodeTracker.class);
 
   private final static HBaseZKTestingUtility TEST_UTIL = new HBaseZKTestingUtility();
 
   @BeforeClass
@@ -244,7 +244,7 @@ public class TestZKNodeTracker {
   }
 
   public static class TestingZKListener extends ZKListener {
-    private static final Log LOG = LogFactory.getLog(TestingZKListener.class);
+    private static final Logger LOG = LoggerFactory.getLogger(TestingZKListener.class);
 
     private Semaphore deletedLock;
     private Semaphore createdLock;
diff --git a/pom.xml b/pom.xml
index cba3e16a703..3eae72da91c 100755
--- a/pom.xml
+++ b/pom.xml
@@ -1329,7 +1329,6 @@
     2.5
     3.6
-    1.2
     3.6.1
     3.3.6
@@ -1360,7 +1359,7 @@ thrift
     0.9.3
     3.4.10
-    1.7.24
+    1.7.25
     4.0.3
     2.4.1
     1.3.8
@@ -1709,6 +1708,11 @@ jettison
        <version>${jettison.version}</version>
      </dependency>
+     <dependency>
+       <groupId>org.slf4j</groupId>
+       <artifactId>slf4j-log4j12</artifactId>
+       <version>${slf4j.version}</version>
+     </dependency>
      <dependency>
        <groupId>log4j</groupId>
        <artifactId>log4j</artifactId>
@@ -1767,11 +1771,6 @@ commons-lang3
        <version>${commons-lang3.version}</version>
      </dependency>
-     <dependency>
-       <groupId>commons-logging</groupId>
-       <artifactId>commons-logging</artifactId>
-       <version>${commons-logging.version}</version>
-     </dependency>
      <dependency>
        <groupId>org.apache.commons</groupId>
        <artifactId>commons-math3</artifactId>