HBASE-10092 Move up on to log4j2

Changes:
- replaced commons-logging with slf4j everywhere
- log.XXX(Throwable) calls were replaced with log.XXX(t.toString(), t)
- log.XXX(Object) calls were replaced with log.XXX(Objects.toString(obj))
- log.fatal() calls were replaced with log.error(HBaseMarkers.FATAL, ...)
- programmatic log4j configuration was removed from the unit tests

This commit does not affect the current logging configurations, because log4j is still on the
classpath and slf4j-log4j12 binds log4j to slf4j.

Signed-off-by: Michael Stack <stack@apache.org>
parent 13d9e8088c
commit 992b5d8630
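The replacement patterns listed in the commit message are mechanical. Below is a minimal
before/after sketch in plain Java, outside the HBase tree; the class name HypotheticalService and
its demo method are illustrative only and do not appear in the patch.

import java.util.Objects;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class HypotheticalService {
  // Before: private static final Log LOG = LogFactory.getLog(HypotheticalService.class);
  // using org.apache.commons.logging.Log and LogFactory.
  private static final Logger LOG = LoggerFactory.getLogger(HypotheticalService.class);

  void demo(Object state, Throwable t) {
    // log.XXX(Object) becomes log.XXX(Objects.toString(obj)):
    // slf4j logging methods take a String message, not an arbitrary Object.
    LOG.info(Objects.toString(state));

    // log.XXX(Throwable) becomes log.XXX(t.toString(), t):
    // slf4j has no single-Throwable overload, so the message and the stack
    // trace are passed explicitly.
    LOG.warn(t.toString(), t);
  }
}

The log.fatal() replacement is shown in context in the ConnectionImplementation.abort() hunk
further down.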
The diff opens with the Maven dependency swap:

@@ -132,8 +132,8 @@
     <artifactId>commons-lang3</artifactId>
   </dependency>
   <dependency>
-    <groupId>commons-logging</groupId>
-    <artifactId>commons-logging</artifactId>
+    <groupId>org.slf4j</groupId>
+    <artifactId>slf4j-api</artifactId>
   </dependency>
   <dependency>
     <groupId>org.apache.hbase.thirdparty</groupId>

The Java hunks that follow apply the same mechanical substitution to one class after another: the
commons-logging imports are removed, slf4j imports are added, and the logger field is re-declared
against the slf4j API (<ClassName> stands for each affected class):

-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;

-  private static final Log LOG = LogFactory.getLog(<ClassName>.class);
+  private static final Logger LOG = LoggerFactory.getLogger(<ClassName>.class);

Classes converted this way in the first half of the diff: AsyncMetaTableAccessor, HRegionInfo,
MetaTableAccessor (both its LOG and the "org.apache.hadoop.hbase.META" METALOG logger),
AsyncBatchRpcRetryingCaller, AsyncConnectionImpl, AsyncMetaRegionLocator, AsyncNonMetaRegionLocator,
AsyncProcess, AsyncRegionLocator, AsyncRequestFutureImpl, AsyncRpcRetryingCaller,
AsyncScanSingleRegionRpcRetryingCaller, AsyncTableResultScanner, BatchErrors, BufferedMutatorImpl,
ClientIdGenerator, ClientScanner, ClusterStatusListener, and ConnectionImplementation, which also
gains an import of org.apache.hadoop.hbase.log.HBaseMarkers for the FATAL marker used below.
@@ -1882,9 +1882,9 @@ class ConnectionImplementation implements ClusterConnection, Closeable {
   @Override
   public void abort(final String msg, Throwable t) {
     if (t != null) {
-      LOG.fatal(msg, t);
+      LOG.error(HBaseMarkers.FATAL, msg, t);
     } else {
-      LOG.fatal(msg);
+      LOG.error(HBaseMarkers.FATAL, msg);
     }
     this.aborted = true;
     close();
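slf4j has no fatal() method, which is why the hunk above logs at ERROR with a FATAL marker. A
minimal sketch of that pattern, assuming a marker holder shaped roughly like the
org.apache.hadoop.hbase.log.HBaseMarkers class imported earlier in this diff (its actual contents
are not shown on this page):

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.Marker;
import org.slf4j.MarkerFactory;

// Assumed stand-in for HBaseMarkers; only the HBaseMarkers.FATAL name is visible in the diff.
final class Markers {
  static final Marker FATAL = MarkerFactory.getMarker("FATAL");

  private Markers() {
  }
}

class AbortExample {
  private static final Logger LOG = LoggerFactory.getLogger(AbortExample.class);

  // Mirrors the abort() change above: the ERROR-level call carries the FATAL
  // marker, with or without a cause.
  void abort(String msg, Throwable t) {
    if (t != null) {
      LOG.error(Markers.FATAL, msg, t);
    } else {
      LOG.error(Markers.FATAL, msg);
    }
  }
}

A downstream logging configuration can still single these events out by matching on the marker.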
@ -32,8 +32,6 @@ import java.util.concurrent.ExecutorService;
|
|||
import java.util.concurrent.ThreadLocalRandom;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.hbase.Cell;
|
||||
import org.apache.hadoop.hbase.CellComparator;
|
||||
|
@ -51,7 +49,8 @@ import org.apache.hadoop.hbase.util.ReflectionUtils;
|
|||
import org.apache.hadoop.ipc.RemoteException;
|
||||
import org.apache.hadoop.net.DNS;
|
||||
import org.apache.yetus.audience.InterfaceAudience;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;
|
||||
|
@ -67,7 +66,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MasterServ
|
|||
@InterfaceAudience.Private
|
||||
public final class ConnectionUtils {
|
||||
|
||||
private static final Log LOG = LogFactory.getLog(ConnectionUtils.class);
|
||||
private static final Logger LOG = LoggerFactory.getLogger(ConnectionUtils.class);
|
||||
|
||||
private ConnectionUtils() {
|
||||
}
|
||||
|
@ -110,7 +109,7 @@ public final class ConnectionUtils {
|
|||
* @param log Used to log what we set in here.
|
||||
*/
|
||||
public static void setServerSideHConnectionRetriesConfig(final Configuration c, final String sn,
|
||||
final Log log) {
|
||||
final Logger log) {
|
||||
// TODO: Fix this. Not all connections from server side should have 10 times the retries.
|
||||
int hcRetries = c.getInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER,
|
||||
HConstants.DEFAULT_HBASE_CLIENT_RETRIES_NUMBER);
|
||||
|
|
|
@ -17,9 +17,9 @@
|
|||
*/
|
||||
package org.apache.hadoop.hbase.client;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.apache.yetus.audience.InterfaceAudience;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
|
||||
|
||||
import java.util.List;
|
||||
|
@ -40,7 +40,7 @@ import java.util.Map;
|
|||
*/
|
||||
@InterfaceAudience.Private
|
||||
public class DelayingRunner implements Runnable {
|
||||
private static final Log LOG = LogFactory.getLog(DelayingRunner.class);
|
||||
private static final Logger LOG = LoggerFactory.getLogger(DelayingRunner.class);
|
||||
|
||||
private final Object sleepLock = new Object();
|
||||
private boolean triggerWake = false;
|
||||
|
|
|
@ -20,15 +20,14 @@ package org.apache.hadoop.hbase.client;
|
|||
|
||||
import java.io.IOException;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.apache.hadoop.hbase.TableName;
|
||||
import org.apache.hadoop.hbase.ipc.HBaseRpcController;
|
||||
import org.apache.hadoop.hbase.ipc.RpcControllerFactory;
|
||||
import org.apache.hadoop.hbase.util.Bytes;
|
||||
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
|
||||
import org.apache.yetus.audience.InterfaceAudience;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
|
||||
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionRequest;
|
||||
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionResponse;
|
||||
|
@ -38,7 +37,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegion
|
|||
*/
|
||||
@InterfaceAudience.Private
|
||||
public class FlushRegionCallable extends RegionAdminServiceCallable<FlushRegionResponse> {
|
||||
private static final Log LOG = LogFactory.getLog(FlushRegionCallable.class);
|
||||
private static final Logger LOG = LoggerFactory.getLogger(FlushRegionCallable.class);
|
||||
private final byte[] regionName;
|
||||
private final boolean writeFlushWalMarker;
|
||||
private boolean reload;
|
||||
|
|
|
@ -30,10 +30,10 @@ import java.util.Set;
|
|||
import java.util.TreeMap;
|
||||
import java.util.TreeSet;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.apache.hadoop.hbase.HConstants;
|
||||
import org.apache.yetus.audience.InterfaceAudience;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.apache.hadoop.hbase.filter.Filter;
|
||||
import org.apache.hadoop.hbase.io.TimeRange;
|
||||
import org.apache.hadoop.hbase.security.access.Permission;
|
||||
|
@ -66,7 +66,7 @@ import org.apache.hadoop.hbase.util.Bytes;
|
|||
@InterfaceAudience.Public
|
||||
public class Get extends Query
|
||||
implements Row, Comparable<Row> {
|
||||
private static final Log LOG = LogFactory.getLog(Get.class);
|
||||
private static final Logger LOG = LoggerFactory.getLogger(Get.class);
|
||||
|
||||
private byte [] row = null;
|
||||
private int maxVersions = 1;
|
||||
|
|
|
@ -46,8 +46,6 @@ import java.util.regex.Pattern;
|
|||
import java.util.stream.Collectors;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.hbase.Abortable;
|
||||
import org.apache.hadoop.hbase.CacheEvictionStats;
|
||||
|
@ -103,6 +101,8 @@ import org.apache.hadoop.ipc.RemoteException;
|
|||
import org.apache.hadoop.util.StringUtils;
|
||||
import org.apache.yetus.audience.InterfaceAudience;
|
||||
import org.apache.yetus.audience.InterfaceStability;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;
|
||||
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
|
||||
|
@ -223,7 +223,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos;
|
|||
@InterfaceAudience.Private
|
||||
@InterfaceStability.Evolving
|
||||
public class HBaseAdmin implements Admin {
|
||||
private static final Log LOG = LogFactory.getLog(HBaseAdmin.class);
|
||||
private static final Logger LOG = LoggerFactory.getLogger(HBaseAdmin.class);
|
||||
|
||||
private ClusterConnection connection;
|
||||
|
||||
|
|
|
@ -26,8 +26,6 @@ import com.google.protobuf.Message;
|
|||
import com.google.protobuf.Service;
|
||||
import com.google.protobuf.ServiceException;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.hbase.Cell;
|
||||
import org.apache.hadoop.hbase.CompareOperator;
|
||||
|
@ -38,6 +36,8 @@ import org.apache.hadoop.hbase.KeyValueUtil;
|
|||
import org.apache.hadoop.hbase.TableName;
|
||||
import org.apache.yetus.audience.InterfaceAudience;
|
||||
import org.apache.yetus.audience.InterfaceStability;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.apache.hadoop.hbase.client.coprocessor.Batch;
|
||||
import org.apache.hadoop.hbase.client.coprocessor.Batch.Callback;
|
||||
import org.apache.hadoop.hbase.filter.BinaryComparator;
|
||||
|
@ -103,7 +103,7 @@ import static org.apache.hadoop.hbase.client.ConnectionUtils.checkHasFamilies;
|
|||
@InterfaceAudience.Private
|
||||
@InterfaceStability.Stable
|
||||
public class HTable implements Table {
|
||||
private static final Log LOG = LogFactory.getLog(HTable.class);
|
||||
private static final Logger LOG = LoggerFactory.getLogger(HTable.class);
|
||||
private static final Consistency DEFAULT_CONSISTENCY = Consistency.STRONG;
|
||||
private final ClusterConnection connection;
|
||||
private final TableName tableName;
|
||||
|
|
|
@ -35,8 +35,6 @@ import java.util.concurrent.TimeUnit;
|
|||
import java.util.concurrent.atomic.AtomicInteger;
|
||||
import java.util.concurrent.atomic.AtomicLong;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.hbase.HBaseConfiguration;
|
||||
import org.apache.hadoop.hbase.HConstants;
|
||||
|
@ -46,7 +44,8 @@ import org.apache.hadoop.hbase.TableName;
|
|||
import org.apache.hadoop.hbase.ipc.RpcControllerFactory;
|
||||
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
|
||||
import org.apache.yetus.audience.InterfaceAudience;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder;
|
||||
|
||||
|
@ -66,7 +65,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFa
|
|||
*/
|
||||
@InterfaceAudience.Public
|
||||
public class HTableMultiplexer {
|
||||
private static final Log LOG = LogFactory.getLog(HTableMultiplexer.class.getName());
|
||||
private static final Logger LOG = LoggerFactory.getLogger(HTableMultiplexer.class.getName());
|
||||
|
||||
public static final String TABLE_MULTIPLEXER_FLUSH_PERIOD_MS =
|
||||
"hbase.tablemultiplexer.flush.period.ms";
|
||||
|
|
|
@ -27,8 +27,6 @@ import java.util.concurrent.ConcurrentMap;
|
|||
import java.util.concurrent.ConcurrentNavigableMap;
|
||||
import java.util.concurrent.CopyOnWriteArraySet;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.apache.hadoop.hbase.HConstants;
|
||||
import org.apache.hadoop.hbase.HRegionLocation;
|
||||
import org.apache.hadoop.hbase.RegionLocations;
|
||||
|
@ -37,6 +35,8 @@ import org.apache.hadoop.hbase.TableName;
|
|||
import org.apache.hadoop.hbase.types.CopyOnWriteArrayMap;
|
||||
import org.apache.hadoop.hbase.util.Bytes;
|
||||
import org.apache.yetus.audience.InterfaceAudience;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
/**
|
||||
* A cache implementation for region locations from meta.
|
||||
|
@ -44,7 +44,7 @@ import org.apache.yetus.audience.InterfaceAudience;
|
|||
@InterfaceAudience.Private
|
||||
public class MetaCache {
|
||||
|
||||
private static final Log LOG = LogFactory.getLog(MetaCache.class);
|
||||
private static final Logger LOG = LoggerFactory.getLogger(MetaCache.class);
|
||||
|
||||
/**
|
||||
* Map of table to table {@link HRegionLocation}s.
|
||||
|
|
|
@ -27,12 +27,12 @@ import java.util.concurrent.ConcurrentHashMap;
|
|||
import java.util.concurrent.ConcurrentMap;
|
||||
|
||||
import org.apache.commons.lang3.mutable.MutableBoolean;
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.hbase.HConstants;
|
||||
import org.apache.hadoop.hbase.ServerName;
|
||||
import org.apache.yetus.audience.InterfaceAudience;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.apache.hadoop.hbase.exceptions.ClientExceptionsUtil;
|
||||
import org.apache.hadoop.hbase.exceptions.PreemptiveFastFailException;
|
||||
import org.apache.hadoop.hbase.ipc.CallTimeoutException;
|
||||
|
@ -66,8 +66,8 @@ import org.apache.hadoop.ipc.RemoteException;
|
|||
@InterfaceAudience.Private
|
||||
class PreemptiveFastFailInterceptor extends RetryingCallerInterceptor {
|
||||
|
||||
private static final Log LOG = LogFactory
|
||||
.getLog(PreemptiveFastFailInterceptor.class);
|
||||
private static final Logger LOG = LoggerFactory
|
||||
.getLogger(PreemptiveFastFailInterceptor.class);
|
||||
|
||||
// amount of time to wait before we consider a server to be in fast fail
|
||||
// mode
|
||||
|
|
|
@ -45,8 +45,6 @@ import java.util.stream.Collectors;
|
|||
import java.util.stream.Stream;
|
||||
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.hbase.AsyncMetaTableAccessor;
|
||||
import org.apache.hadoop.hbase.ClusterStatus;
|
||||
|
@ -87,7 +85,8 @@ import org.apache.hadoop.hbase.util.Bytes;
|
|||
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
|
||||
import org.apache.hadoop.hbase.util.ForeignExceptionUtil;
|
||||
import org.apache.yetus.audience.InterfaceAudience;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback;
|
||||
import org.apache.hadoop.hbase.shaded.io.netty.util.HashedWheelTimer;
|
||||
import org.apache.hadoop.hbase.shaded.io.netty.util.Timeout;
|
||||
|
@ -270,7 +269,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos;
|
|||
class RawAsyncHBaseAdmin implements AsyncAdmin {
|
||||
public static final String FLUSH_TABLE_PROCEDURE_SIGNATURE = "flush-table-proc";
|
||||
|
||||
private static final Log LOG = LogFactory.getLog(AsyncHBaseAdmin.class);
|
||||
private static final Logger LOG = LoggerFactory.getLogger(AsyncHBaseAdmin.class);
|
||||
|
||||
private final AsyncConnectionImpl connection;
|
||||
|
||||
|
|
|
@ -19,11 +19,11 @@ package org.apache.hadoop.hbase.client;
|
|||
|
||||
import java.io.IOException;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.apache.hadoop.hbase.HConstants;
|
||||
import org.apache.hadoop.hbase.TableName;
|
||||
import org.apache.yetus.audience.InterfaceAudience;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
|
||||
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest;
|
||||
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse;
|
||||
|
@ -43,7 +43,7 @@ import com.google.protobuf.RpcController;
|
|||
*/
|
||||
@InterfaceAudience.Private
|
||||
class RegionCoprocessorRpcChannel extends SyncCoprocessorRpcChannel {
|
||||
private static final Log LOG = LogFactory.getLog(RegionCoprocessorRpcChannel.class);
|
||||
private static final Logger LOG = LoggerFactory.getLogger(RegionCoprocessorRpcChannel.class);
|
||||
private final TableName table;
|
||||
private final byte [] row;
|
||||
private final ClusterConnection conn;
|
||||
|
|
|
@ -21,16 +21,16 @@ package org.apache.hadoop.hbase.client;
|
|||
import java.util.Arrays;
|
||||
|
||||
import org.apache.commons.lang3.ArrayUtils;
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.apache.hadoop.hbase.HConstants;
|
||||
import org.apache.hadoop.hbase.TableName;
|
||||
import org.apache.hadoop.hbase.util.Bytes;
|
||||
import org.apache.yetus.audience.InterfaceAudience;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
@InterfaceAudience.Private
|
||||
public class RegionInfoBuilder {
|
||||
private static final Log LOG = LogFactory.getLog(RegionInfoBuilder.class);
|
||||
private static final Logger LOG = LoggerFactory.getLogger(RegionInfoBuilder.class);
|
||||
|
||||
/** A non-capture group so that this can be embedded. */
|
||||
public static final String ENCODED_REGION_NAME_REGEX = "(?:[a-f0-9]+)";
|
||||
|
|
|
@ -26,10 +26,10 @@ import java.util.concurrent.RunnableFuture;
|
|||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.concurrent.TimeoutException;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.apache.hadoop.hbase.trace.TraceUtil;
|
||||
import org.apache.yetus.audience.InterfaceAudience;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
|
||||
|
||||
/**
|
||||
|
@ -47,7 +47,7 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
|
|||
*/
|
||||
@InterfaceAudience.Private
|
||||
public class ResultBoundedCompletionService<V> {
|
||||
private static final Log LOG = LogFactory.getLog(ResultBoundedCompletionService.class);
|
||||
private static final Logger LOG = LoggerFactory.getLogger(ResultBoundedCompletionService.class);
|
||||
private final RpcRetryingCallerFactory retryingCallerFactory;
|
||||
private final Executor executor;
|
||||
private final QueueingFuture<V>[] tasks; // all the tasks
|
||||
|
|
|
@ -20,11 +20,11 @@ package org.apache.hadoop.hbase.client;
|
|||
|
||||
import java.lang.reflect.Constructor;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.hbase.HConstants;
|
||||
import org.apache.yetus.audience.InterfaceAudience;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
/**
|
||||
* Factory implementation to provide the {@link ConnectionImplementation} with
|
||||
|
@ -35,8 +35,8 @@ import org.apache.yetus.audience.InterfaceAudience;
|
|||
|
||||
@InterfaceAudience.Private
|
||||
class RetryingCallerInterceptorFactory {
|
||||
private static final Log LOG = LogFactory
|
||||
.getLog(RetryingCallerInterceptorFactory.class);
|
||||
private static final Logger LOG = LoggerFactory
|
||||
.getLogger(RetryingCallerInterceptorFactory.class);
|
||||
private Configuration conf;
|
||||
private final boolean failFast;
|
||||
public static final RetryingCallerInterceptor NO_OP_INTERCEPTOR =
|
||||
|
|
|
@ -17,12 +17,12 @@
|
|||
*/
|
||||
package org.apache.hadoop.hbase.client;
|
||||
|
||||
import org.apache.yetus.audience.InterfaceAudience;
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.hbase.HConstants;
|
||||
import org.apache.hadoop.hbase.util.ReflectionUtils;
|
||||
import org.apache.yetus.audience.InterfaceAudience;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
/**
|
||||
* Factory to create an {@link RpcRetryingCaller}
|
||||
|
@ -32,7 +32,7 @@ public class RpcRetryingCallerFactory {
|
|||
|
||||
/** Configuration key for a custom {@link RpcRetryingCaller} */
|
||||
public static final String CUSTOM_CALLER_CONF_KEY = "hbase.rpc.callerfactory.class";
|
||||
private static final Log LOG = LogFactory.getLog(RpcRetryingCallerFactory.class);
|
||||
private static final Logger LOG = LoggerFactory.getLogger(RpcRetryingCallerFactory.class);
|
||||
protected final Configuration conf;
|
||||
private final long pause;
|
||||
private final long pauseForCQTBE;// pause for CallQueueTooBigException, if specified
|
||||
|
|
|
@ -28,8 +28,7 @@ import java.net.SocketTimeoutException;
|
|||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.atomic.AtomicBoolean;
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
|
||||
import org.apache.hadoop.hbase.CallQueueTooBigException;
|
||||
import org.apache.hadoop.hbase.DoNotRetryIOException;
|
||||
import org.apache.hadoop.hbase.exceptions.PreemptiveFastFailException;
|
||||
|
@ -38,7 +37,8 @@ import org.apache.hadoop.hbase.util.ExceptionUtil;
|
|||
import org.apache.hadoop.ipc.RemoteException;
|
||||
import org.apache.hadoop.util.StringUtils;
|
||||
import org.apache.yetus.audience.InterfaceAudience;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;
|
||||
|
||||
/**
|
||||
|
@ -53,7 +53,7 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;
|
|||
@InterfaceAudience.Private
|
||||
public class RpcRetryingCallerImpl<T> implements RpcRetryingCaller<T> {
|
||||
// LOG is being used in TestMultiRowRangeFilter, hence leaving it public
|
||||
public static final Log LOG = LogFactory.getLog(RpcRetryingCallerImpl.class);
|
||||
public static final Logger LOG = LoggerFactory.getLogger(RpcRetryingCallerImpl.class);
|
||||
|
||||
/** How many retries are allowed before we start to log */
|
||||
private final int startLogErrorsCnt;
|
||||
|
|
|
@ -28,8 +28,6 @@ import java.util.concurrent.ExecutorService;
|
|||
import java.util.concurrent.Future;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.hbase.DoNotRetryIOException;
|
||||
import org.apache.hadoop.hbase.HBaseIOException;
|
||||
|
@ -37,6 +35,8 @@ import org.apache.hadoop.hbase.HRegionLocation;
|
|||
import org.apache.hadoop.hbase.RegionLocations;
|
||||
import org.apache.hadoop.hbase.TableName;
|
||||
import org.apache.yetus.audience.InterfaceAudience;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.apache.hadoop.hbase.ipc.HBaseRpcController;
|
||||
import org.apache.hadoop.hbase.ipc.RpcControllerFactory;
|
||||
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
|
||||
|
@ -54,7 +54,9 @@ import static org.apache.hadoop.hbase.HConstants.PRIORITY_UNSET;
|
|||
*/
|
||||
@InterfaceAudience.Private
|
||||
public class RpcRetryingCallerWithReadReplicas {
|
||||
private static final Log LOG = LogFactory.getLog(RpcRetryingCallerWithReadReplicas.class);
|
||||
private static final Logger LOG =
|
||||
LoggerFactory.getLogger(RpcRetryingCallerWithReadReplicas.class);
|
||||
|
||||
protected final ExecutorService pool;
|
||||
protected final ClusterConnection cConnection;
|
||||
protected final Configuration conf;
|
||||
|
|
|
@ -29,10 +29,10 @@ import java.util.NavigableSet;
|
|||
import java.util.TreeMap;
|
||||
import java.util.TreeSet;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.apache.hadoop.hbase.HConstants;
|
||||
import org.apache.yetus.audience.InterfaceAudience;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.apache.hadoop.hbase.client.metrics.ScanMetrics;
|
||||
import org.apache.hadoop.hbase.filter.Filter;
|
||||
import org.apache.hadoop.hbase.filter.IncompatibleFilterException;
|
||||
|
@ -87,7 +87,7 @@ import org.apache.hadoop.hbase.util.Bytes;
|
|||
*/
|
||||
@InterfaceAudience.Public
|
||||
public class Scan extends Query {
|
||||
private static final Log LOG = LogFactory.getLog(Scan.class);
|
||||
private static final Logger LOG = LoggerFactory.getLogger(Scan.class);
|
||||
|
||||
private static final String RAW_ATTR = "_raw_";
|
||||
|
||||
|
|
|
@ -27,8 +27,6 @@ import static org.apache.hadoop.hbase.client.ConnectionUtils.updateServerSideMet
|
|||
import java.io.IOException;
|
||||
import java.io.InterruptedIOException;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.hbase.DoNotRetryIOException;
|
||||
import org.apache.hadoop.hbase.HBaseIOException;
|
||||
|
@ -40,6 +38,8 @@ import org.apache.hadoop.hbase.ServerName;
|
|||
import org.apache.hadoop.hbase.TableName;
|
||||
import org.apache.hadoop.hbase.UnknownScannerException;
|
||||
import org.apache.yetus.audience.InterfaceAudience;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.apache.hadoop.hbase.client.metrics.ScanMetrics;
|
||||
import org.apache.hadoop.hbase.exceptions.ScannerResetException;
|
||||
import org.apache.hadoop.hbase.ipc.RpcControllerFactory;
|
||||
|
@ -62,7 +62,7 @@ public class ScannerCallable extends ClientServiceCallable<Result[]> {
|
|||
public static final String LOG_SCANNER_ACTIVITY = "hbase.client.log.scanner.activity";
|
||||
|
||||
// Keeping LOG public as it is being used in TestScannerHeartbeatMessages
|
||||
public static final Log LOG = LogFactory.getLog(ScannerCallable.class);
|
||||
public static final Logger LOG = LoggerFactory.getLogger(ScannerCallable.class);
|
||||
protected long scannerId = -1L;
|
||||
protected boolean instantiated = false;
|
||||
protected boolean closed = false;
|
||||
|
|
|
@ -32,14 +32,14 @@ import java.util.concurrent.Future;
|
|||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.concurrent.atomic.AtomicBoolean;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.hbase.DoNotRetryIOException;
|
||||
import org.apache.hadoop.hbase.HRegionInfo;
|
||||
import org.apache.hadoop.hbase.RegionLocations;
|
||||
import org.apache.hadoop.hbase.TableName;
|
||||
import org.apache.yetus.audience.InterfaceAudience;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.apache.hadoop.hbase.client.ScannerCallable.MoreResults;
|
||||
import org.apache.hadoop.hbase.util.Pair;
|
||||
|
||||
|
@ -57,7 +57,7 @@ import org.apache.hadoop.hbase.util.Pair;
|
|||
*/
|
||||
@InterfaceAudience.Private
|
||||
class ScannerCallableWithReplicas implements RetryingCallable<Result[]> {
|
||||
private static final Log LOG = LogFactory.getLog(ScannerCallableWithReplicas.class);
|
||||
private static final Logger LOG = LoggerFactory.getLogger(ScannerCallableWithReplicas.class);
|
||||
volatile ScannerCallable currentScannerCallable;
|
||||
AtomicBoolean replicaSwitched = new AtomicBoolean(false);
|
||||
final ClusterConnection cConnection;
|
||||
|
|
|
@ -34,8 +34,7 @@ import java.util.concurrent.ConcurrentSkipListMap;
|
|||
import java.util.concurrent.atomic.AtomicInteger;
|
||||
import java.util.concurrent.atomic.AtomicLong;
|
||||
import java.util.function.Consumer;
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.hbase.HConstants;
|
||||
import org.apache.hadoop.hbase.HRegionInfo;
|
||||
|
@ -43,6 +42,8 @@ import org.apache.hadoop.hbase.HRegionLocation;
|
|||
import org.apache.hadoop.hbase.ServerName;
|
||||
import org.apache.yetus.audience.InterfaceAudience;
|
||||
import org.apache.yetus.audience.InterfaceStability;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.apache.hadoop.hbase.util.Bytes;
|
||||
import static org.apache.hadoop.hbase.util.CollectionUtils.computeIfAbsent;
|
||||
import org.apache.hadoop.hbase.util.EnvironmentEdge;
|
||||
|
@ -54,7 +55,7 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
|
|||
@InterfaceAudience.Private
|
||||
@InterfaceStability.Evolving
|
||||
class SimpleRequestController implements RequestController {
|
||||
private static final Log LOG = LogFactory.getLog(SimpleRequestController.class);
|
||||
private static final Logger LOG = LoggerFactory.getLogger(SimpleRequestController.class);
|
||||
/**
|
||||
* The maximum heap size for each request.
|
||||
*/
|
||||
|
|
|
@@ -25,9 +25,9 @@ import com.google.protobuf.ServiceException;

import java.io.IOException;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;

@@ -39,7 +39,7 @@ import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
 */
@InterfaceAudience.Public
abstract class SyncCoprocessorRpcChannel implements CoprocessorRpcChannel {
private static final Log LOG = LogFactory.getLog(SyncCoprocessorRpcChannel.class);
private static final Logger LOG = LoggerFactory.getLogger(SyncCoprocessorRpcChannel.class);

@Override
@InterfaceAudience.Private

@@ -34,8 +34,6 @@ import java.util.function.Function;
import java.util.regex.Matcher;
import java.util.stream.Stream;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Coprocessor;
import org.apache.hadoop.hbase.HConstants;

@@ -46,13 +44,15 @@ import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * @since 2.0.0
 */
@InterfaceAudience.Public
public class TableDescriptorBuilder {
public static final Log LOG = LogFactory.getLog(TableDescriptorBuilder.class);
public static final Logger LOG = LoggerFactory.getLogger(TableDescriptorBuilder.class);
@InterfaceAudience.Private
public static final String SPLIT_POLICY = "SPLIT_POLICY";
private static final Bytes SPLIT_POLICY_KEY = new Bytes(Bytes.toBytes(SPLIT_POLICY));

@@ -28,8 +28,6 @@ import java.io.IOException;
import java.util.concurrent.CompletableFuture;

import org.apache.commons.lang3.mutable.MutableInt;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ClusterId;
import org.apache.hadoop.hbase.HRegionLocation;

@@ -41,7 +39,8 @@ import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.zookeeper.ReadOnlyZKClient;
import org.apache.hadoop.hbase.zookeeper.ZNodePaths;
import org.apache.yetus.audience.InterfaceAudience;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos;

@@ -52,7 +51,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos;
@InterfaceAudience.Private
class ZKAsyncRegistry implements AsyncRegistry {

private static final Log LOG = LogFactory.getLog(ZKAsyncRegistry.class);
private static final Logger LOG = LoggerFactory.getLogger(ZKAsyncRegistry.class);

private final ReadOnlyZKClient zk;
@@ -17,19 +17,19 @@
 */
package org.apache.hadoop.hbase.client.backoff;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ServerName;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.util.ReflectionUtils;

@InterfaceAudience.Private
@InterfaceStability.Evolving
public final class ClientBackoffPolicyFactory {

private static final Log LOG = LogFactory.getLog(ClientBackoffPolicyFactory.class);
private static final Logger LOG = LoggerFactory.getLogger(ClientBackoffPolicyFactory.class);

private ClientBackoffPolicyFactory() {
}

@@ -17,13 +17,12 @@
 */
package org.apache.hadoop.hbase.client.backoff;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.ServerName;
import org.apache.yetus.audience.InterfaceAudience;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;

/**

@@ -33,7 +32,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
@InterfaceAudience.Public
public class ExponentialClientBackoffPolicy implements ClientBackoffPolicy {

private static final Log LOG = LogFactory.getLog(ExponentialClientBackoffPolicy.class);
private static final Logger LOG = LoggerFactory.getLogger(ExponentialClientBackoffPolicy.class);

private static final long ONE_MINUTE = 60 * 1000;
public static final long DEFAULT_MAX_BACKOFF = 5 * ONE_MINUTE;

@@ -30,13 +30,13 @@ import java.util.Set;
import java.util.TreeMap;
import java.util.regex.Pattern;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.ReplicationPeerNotFoundException;
import org.apache.hadoop.hbase.TableName;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;

@@ -72,7 +72,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
@InterfaceAudience.Public
@Deprecated
public class ReplicationAdmin implements Closeable {
private static final Log LOG = LogFactory.getLog(ReplicationAdmin.class);
private static final Logger LOG = LoggerFactory.getLogger(ReplicationAdmin.class);

public static final String TNAME = "tableName";
public static final String CFNAME = "columnFamilyName";

@@ -18,37 +18,38 @@
 */
package org.apache.hadoop.hbase.client.replication;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;

import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.CompoundConfiguration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos;
import org.apache.hadoop.hbase.replication.ReplicationException;
import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
import org.apache.hadoop.hbase.replication.ReplicationPeerDescription;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Strings;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.HashMap;
import java.util.ArrayList;
import java.util.Set;
import java.util.stream.Collectors;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos;

/**
 * Helper for TableCFs Operations.

@@ -57,7 +58,7 @@ import java.util.stream.Collectors;
@InterfaceStability.Stable
public final class ReplicationPeerConfigUtil {

private static final Log LOG = LogFactory.getLog(ReplicationPeerConfigUtil.class);
private static final Logger LOG = LoggerFactory.getLogger(ReplicationPeerConfigUtil.class);

private ReplicationPeerConfigUtil() {}

@@ -17,13 +17,13 @@
 */
package org.apache.hadoop.hbase.exceptions;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.NotServingRegionException;
import org.apache.hadoop.hbase.ServerName;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Subclass if the server knows the region is now on another server.

@@ -32,7 +32,7 @@ import org.apache.yetus.audience.InterfaceStability;
@InterfaceAudience.Private
@InterfaceStability.Evolving
public class RegionMovedException extends NotServingRegionException {
private static final Log LOG = LogFactory.getLog(RegionMovedException.class);
private static final Logger LOG = LoggerFactory.getLogger(RegionMovedException.class);
private static final long serialVersionUID = -7232903522310558396L;

private final String hostname;

@@ -18,11 +18,11 @@
 */
package org.apache.hadoop.hbase.exceptions;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.NotServingRegionException;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Subclass if the server knows the region is now on another server.

@@ -31,7 +31,7 @@ import org.apache.yetus.audience.InterfaceStability;
@InterfaceAudience.Private
@InterfaceStability.Evolving
public class RegionOpeningException extends NotServingRegionException {
private static final Log LOG = LogFactory.getLog(RegionOpeningException.class);
private static final Logger LOG = LoggerFactory.getLogger(RegionOpeningException.class);
private static final long serialVersionUID = -7232903522310558395L;

public RegionOpeningException(String message) {
@@ -31,10 +31,10 @@ import java.util.Map;
import java.util.Set;
import java.util.Stack;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.CompareOperator;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.util.Bytes;

@@ -49,7 +49,7 @@ import org.apache.hadoop.hbase.util.Bytes;
 */
@InterfaceAudience.Public
public class ParseFilter {
private static final Log LOG = LogFactory.getLog(ParseFilter.class);
private static final Logger LOG = LoggerFactory.getLogger(ParseFilter.class);

private static HashMap<ByteBuffer, Integer> operatorPrecedenceHashMap;
private static HashMap<String, String> filterHashMap;

@@ -23,8 +23,6 @@ import java.nio.charset.IllegalCharsetNameException;
import java.util.Arrays;
import java.util.regex.Pattern;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos;

@@ -36,7 +34,8 @@ import org.joni.Matcher;
import org.joni.Option;
import org.joni.Regex;
import org.joni.Syntax;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;

/**

@@ -72,7 +71,7 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferE
@InterfaceAudience.Public
public class RegexStringComparator extends ByteArrayComparable {

private static final Log LOG = LogFactory.getLog(RegexStringComparator.class);
private static final Logger LOG = LoggerFactory.getLogger(RegexStringComparator.class);

private Engine engine;

@@ -49,12 +49,12 @@ import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.ServerName;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.MetricsConnection;
import org.apache.hadoop.hbase.codec.Codec;
import org.apache.hadoop.hbase.codec.KeyValueCodec;

@@ -94,7 +94,7 @@ import org.apache.hadoop.security.token.TokenSelector;
@InterfaceAudience.Private
public abstract class AbstractRpcClient<T extends RpcConnection> implements RpcClient {
// Log level is being changed in tests
public static final Log LOG = LogFactory.getLog(AbstractRpcClient.class);
public static final Logger LOG = LoggerFactory.getLogger(AbstractRpcClient.class);

protected static final HashedWheelTimer WHEEL_TIMER = new HashedWheelTimer(
Threads.newDaemonThreadFactory("RpcClient-timer"), 10, TimeUnit.MILLISECONDS);

@@ -44,14 +44,14 @@ import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ThreadLocalRandom;
import javax.security.sasl.SaslException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.exceptions.ConnectionClosingException;
import org.apache.hadoop.hbase.io.ByteArrayOutputStream;
import org.apache.hadoop.hbase.ipc.HBaseRpcController.CancellationCallback;
import org.apache.hadoop.hbase.log.HBaseMarkers;
import org.apache.hadoop.hbase.security.HBaseSaslRpcClient;
import org.apache.hadoop.hbase.security.SaslUtil;
import org.apache.hadoop.hbase.security.SaslUtil.QualityOfProtection;

@@ -65,7 +65,8 @@ import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.StringUtils;
import org.apache.htrace.core.TraceScope;
import org.apache.yetus.audience.InterfaceAudience;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message.Builder;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback;

@@ -85,7 +86,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ResponseHeade
@InterfaceAudience.Private
class BlockingRpcConnection extends RpcConnection implements Runnable {

private static final Log LOG = LogFactory.getLog(BlockingRpcConnection.class);
private static final Logger LOG = LoggerFactory.getLogger(BlockingRpcConnection.class);

private final BlockingRpcClient rpcClient;

@@ -419,7 +420,7 @@ class BlockingRpcConnection extends RpcConnection implements Runnable {
if (ex instanceof SaslException) {
String msg = "SASL authentication failed."
+ " The most likely cause is missing or invalid credentials." + " Consider 'kinit'.";
LOG.fatal(msg, ex);
LOG.error(HBaseMarkers.FATAL, msg, ex);
throw new RuntimeException(msg, ex);
}
throw new IOException(ex);

@@ -568,8 +569,9 @@ class BlockingRpcConnection extends RpcConnection implements Runnable {
}
waitingConnectionHeaderResponse = false;
} catch (SocketTimeoutException ste) {
LOG.fatal("Can't get the connection header response for rpc timeout, please check if" +
" server has the correct configuration to support the additional function.", ste);
LOG.error(HBaseMarkers.FATAL, "Can't get the connection header response for rpc timeout, "
+ "please check if server has the correct configuration to support the additional "
+ "function.", ste);
// timeout when waiting the connection header response, ignore the additional function
throw new IOException("Timeout while waiting connection header response", ste);
}
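slf4j has no fatal level, so the two LOG.fatal(...) call sites above become LOG.error(...) tagged with HBaseMarkers.FATAL. A hedged sketch of that pattern in isolation, assuming HBaseMarkers.FATAL is an ordinary org.slf4j.Marker constant (which is what the call shape above implies); the class is illustrative, not part of the patch:

import org.apache.hadoop.hbase.log.HBaseMarkers;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

class SaslFailureReporter {
  private static final Logger LOG = LoggerFactory.getLogger(SaslFailureReporter.class);

  void report(Exception ex) {
    String msg = "SASL authentication failed. The most likely cause is missing or invalid credentials.";
    // The marker keeps the "this is fatal" signal available to layouts and filters
    // even though the slf4j call itself is only at ERROR level; the throwable still
    // travels as the last argument, exactly as it did with commons-logging.
    LOG.error(HBaseMarkers.FATAL, msg, ex);
    throw new RuntimeException(msg, ex);
  }
}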
@@ -29,13 +29,13 @@ import java.nio.BufferOverflowException;
import java.nio.ByteBuffer;

import org.apache.commons.io.IOUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.codec.Codec;
import org.apache.hadoop.hbase.io.ByteBuffInputStream;
import org.apache.hadoop.hbase.io.ByteBufferInputStream;

@@ -58,7 +58,7 @@ import org.apache.hadoop.io.compress.Decompressor;
class CellBlockBuilder {

// LOG is being used in TestCellBlockBuilder
static final Log LOG = LogFactory.getLog(CellBlockBuilder.class);
static final Logger LOG = LoggerFactory.getLogger(CellBlockBuilder.class);

private final Configuration conf;

@@ -23,11 +23,11 @@ import static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Regi
import java.io.IOException;
import java.io.InterruptedIOException;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.HConstants;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.exceptions.UnknownProtocolException;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations;

@@ -52,7 +52,7 @@ import edu.umd.cs.findbugs.annotations.Nullable;
 */
@InterfaceAudience.Private
public final class CoprocessorRpcUtils {
private static final Log LOG = LogFactory.getLog(CoprocessorRpcUtils.class);
private static final Logger LOG = LoggerFactory.getLogger(CoprocessorRpcUtils.class);
/**
 * We assume that all HBase protobuf services share a common package name
 * (defined in the .proto files).

@@ -23,10 +23,10 @@ import java.util.Iterator;
import java.util.LinkedList;
import java.util.Map;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.Pair;

@@ -38,7 +38,7 @@ public class FailedServers {
private final Map<String, Long> failedServers = new HashMap<String, Long>();
private long latestExpiry = 0;
private final int recheckServersTimeout;
private static final Log LOG = LogFactory.getLog(FailedServers.class);
private static final Logger LOG = LoggerFactory.getLogger(FailedServers.class);

public FailedServers(Configuration conf) {
this.recheckServersTimeout = conf.getInt(

@@ -49,9 +49,9 @@ import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.TimeUnit;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.ipc.BufferCallBeforeInitHandler.BufferCallEvent;
import org.apache.hadoop.hbase.ipc.HBaseRpcController.CancellationCallback;
import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader;

@@ -70,7 +70,7 @@ import org.apache.hadoop.security.UserGroupInformation;
@InterfaceAudience.Private
class NettyRpcConnection extends RpcConnection {

private static final Log LOG = LogFactory.getLog(NettyRpcConnection.class);
private static final Logger LOG = LoggerFactory.getLogger(NettyRpcConnection.class);

private static final ScheduledExecutorService RELOGIN_EXECUTOR =
Executors.newSingleThreadScheduledExecutor(Threads.newDaemonThreadFactory("Relogin"));

@@ -34,10 +34,10 @@ import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.CellScanner;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.codec.Codec;
import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta;
import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse;

@@ -53,7 +53,7 @@ import org.apache.hadoop.ipc.RemoteException;
@InterfaceAudience.Private
class NettyRpcDuplexHandler extends ChannelDuplexHandler {

private static final Log LOG = LogFactory.getLog(NettyRpcDuplexHandler.class);
private static final Logger LOG = LoggerFactory.getLogger(NettyRpcDuplexHandler.class);

private final NettyRpcConnection conn;

@@ -25,11 +25,11 @@ import java.io.IOException;
import java.net.UnknownHostException;
import java.util.concurrent.TimeUnit;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.codec.Codec;
import org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;

@@ -52,7 +52,7 @@ import org.apache.hadoop.security.token.TokenSelector;
@InterfaceAudience.Private
abstract class RpcConnection {

private static final Log LOG = LogFactory.getLog(RpcConnection.class);
private static final Logger LOG = LoggerFactory.getLogger(RpcConnection.class);

protected final ConnectionId remoteId;

@@ -19,12 +19,12 @@ package org.apache.hadoop.hbase.ipc;

import java.util.List;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CellScannable;
import org.apache.hadoop.hbase.CellScanner;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.util.ReflectionUtils;

/**

@@ -32,7 +32,7 @@ import org.apache.hadoop.hbase.util.ReflectionUtils;
 */
@InterfaceAudience.Private
public class RpcControllerFactory {
private static final Log LOG = LogFactory.getLog(RpcControllerFactory.class);
private static final Logger LOG = LoggerFactory.getLogger(RpcControllerFactory.class);

/**
 * Custom RPC Controller factory allows frameworks to change the RPC controller. If the configured

@@ -25,11 +25,11 @@ import java.util.LinkedList;
import java.util.Objects;
import java.util.Queue;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.TableName;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Result;

@@ -44,7 +44,7 @@ import org.apache.hadoop.util.StringUtils;
 */
@InterfaceAudience.Public
public class QuotaRetriever implements Closeable, Iterable<QuotaSettings> {
private static final Log LOG = LogFactory.getLog(QuotaRetriever.class);
private static final Logger LOG = LoggerFactory.getLogger(QuotaRetriever.class);

private final Queue<QuotaSettings> cache = new LinkedList<>();
private ResultScanner scanner;
@@ -28,8 +28,6 @@ import java.util.Map;
import java.util.Objects;
import java.util.regex.Pattern;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.CompareOperator;

@@ -38,6 +36,8 @@ import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.ClusterConnection;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Get;

@@ -89,7 +89,7 @@ import org.apache.hadoop.hbase.util.Strings;
@InterfaceAudience.Private
@InterfaceStability.Evolving
public class QuotaTableUtil {
private static final Log LOG = LogFactory.getLog(QuotaTableUtil.class);
private static final Logger LOG = LoggerFactory.getLogger(QuotaTableUtil.class);

/** System table for quotas */
public static final TableName QUOTA_TABLE_NAME =

@@ -33,9 +33,9 @@ import javax.security.sasl.Sasl;
import javax.security.sasl.SaslClient;
import javax.security.sasl.SaslException;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;

@@ -47,7 +47,7 @@ import org.apache.hadoop.security.token.TokenIdentifier;
@InterfaceAudience.Private
public abstract class AbstractHBaseSaslRpcClient {

private static final Log LOG = LogFactory.getLog(AbstractHBaseSaslRpcClient.class);
private static final Logger LOG = LoggerFactory.getLogger(AbstractHBaseSaslRpcClient.class);

private static final byte[] EMPTY_TOKEN = new byte[0];

@@ -28,13 +28,13 @@ import java.util.Properties;
import javax.crypto.spec.SecretKeySpec;

import org.apache.commons.crypto.cipher.CryptoCipherFactory;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.io.crypto.Cipher;
import org.apache.hadoop.hbase.io.crypto.Encryption;

@@ -50,7 +50,7 @@ import org.apache.hadoop.hbase.util.Bytes;
@InterfaceAudience.Private
@InterfaceStability.Evolving
public final class EncryptionUtil {
static private final Log LOG = LogFactory.getLog(EncryptionUtil.class);
static private final Logger LOG = LoggerFactory.getLogger(EncryptionUtil.class);

static private final SecureRandom RNG = new SecureRandom();

@@ -32,10 +32,10 @@ import java.nio.ByteBuffer;
import javax.security.sasl.Sasl;
import javax.security.sasl.SaslException;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.io.crypto.aes.CryptoAES;
import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos;
import org.apache.hadoop.io.WritableUtils;

@@ -52,7 +52,7 @@ import org.apache.hadoop.security.token.TokenIdentifier;
@InterfaceAudience.Private
public class HBaseSaslRpcClient extends AbstractHBaseSaslRpcClient {

private static final Log LOG = LogFactory.getLog(HBaseSaslRpcClient.class);
private static final Logger LOG = LoggerFactory.getLogger(HBaseSaslRpcClient.class);
private boolean cryptoAesEnable;
private CryptoAES cryptoAES;
private InputStream saslInputStream;

@@ -24,9 +24,9 @@ import java.io.IOException;

import javax.security.sasl.Sasl;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;

@@ -36,7 +36,7 @@ import org.apache.hadoop.security.token.TokenIdentifier;
 */
@InterfaceAudience.Private
public class NettyHBaseSaslRpcClient extends AbstractHBaseSaslRpcClient {
private static final Log LOG = LogFactory.getLog(NettyHBaseSaslRpcClient.class);
private static final Logger LOG = LoggerFactory.getLogger(NettyHBaseSaslRpcClient.class);

public NettyHBaseSaslRpcClient(AuthMethod method, Token<? extends TokenIdentifier> token,
String serverPrincipal, boolean fallbackAllowed, String rpcProtection) throws IOException {

@@ -25,10 +25,10 @@ import org.apache.hadoop.hbase.shaded.io.netty.util.concurrent.Promise;
import java.io.IOException;
import java.security.PrivilegedExceptionAction;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.ipc.FallbackDisallowedException;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;

@@ -41,7 +41,7 @@ import org.apache.hadoop.security.token.TokenIdentifier;
@InterfaceAudience.Private
public class NettyHBaseSaslRpcClientHandler extends SimpleChannelInboundHandler<ByteBuf> {

private static final Log LOG = LogFactory.getLog(NettyHBaseSaslRpcClientHandler.class);
private static final Logger LOG = LoggerFactory.getLogger(NettyHBaseSaslRpcClientHandler.class);

private final Promise<Boolean> saslPromise;

@@ -28,13 +28,13 @@ import javax.security.sasl.SaslException;
import javax.security.sasl.SaslServer;

import org.apache.commons.codec.binary.Base64;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@InterfaceAudience.Private
public class SaslUtil {
private static final Log LOG = LogFactory.getLog(SaslUtil.class);
private static final Logger LOG = LoggerFactory.getLogger(SaslUtil.class);
public static final String SASL_DEFAULT_REALM = "default";
public static final int SWITCH_TO_SIMPLE_AUTH = -88;

@@ -24,9 +24,9 @@ import java.io.IOException;
import java.util.Arrays;
import java.util.Map;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.VersionedWritable;

@@ -54,7 +54,7 @@ public class Permission extends VersionedWritable {
public byte code() { return code; }
}

private static final Log LOG = LogFactory.getLog(Permission.class);
private static final Logger LOG = LoggerFactory.getLogger(Permission.class);
protected static final Map<Byte,Action> ACTION_BY_CODE = Maps.newHashMap();

protected Action[] actions;

@@ -22,10 +22,10 @@ import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.TableName;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.util.Bytes;

/**

@@ -34,7 +34,7 @@ import org.apache.hadoop.hbase.util.Bytes;
 */
@InterfaceAudience.Private
public class UserPermission extends TablePermission {
private static final Log LOG = LogFactory.getLog(UserPermission.class);
private static final Logger LOG = LoggerFactory.getLogger(UserPermission.class);

private byte[] user;
@@ -20,9 +20,9 @@ package org.apache.hadoop.hbase.security.token;

import java.util.Collection;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;

@@ -31,7 +31,7 @@ import org.apache.hadoop.security.token.TokenSelector;
@InterfaceAudience.Private
public class AuthenticationTokenSelector
implements TokenSelector<AuthenticationTokenIdentifier> {
private static final Log LOG = LogFactory.getLog(AuthenticationTokenSelector.class);
private static final Logger LOG = LoggerFactory.getLogger(AuthenticationTokenSelector.class);

public AuthenticationTokenSelector() {
}

@@ -23,8 +23,6 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.DoNotRetryIOException;

@@ -36,7 +34,8 @@ import org.apache.hadoop.hbase.ipc.ServerRpcController;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos;
import org.apache.hadoop.util.StringUtils;
import org.apache.yetus.audience.InterfaceAudience;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionResponse;

@@ -68,7 +67,7 @@ import edu.umd.cs.findbugs.annotations.Nullable;
 */
@InterfaceAudience.Private
public final class ResponseConverter {
private static final Log LOG = LogFactory.getLog(ResponseConverter.class);
private static final Logger LOG = LoggerFactory.getLogger(ResponseConverter.class);

private ResponseConverter() {
}

@@ -30,15 +30,14 @@ import java.util.concurrent.Delayed;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.KeeperException.Code;
import org.apache.zookeeper.ZooKeeper;
import org.apache.zookeeper.data.Stat;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;

/**

@@ -47,7 +46,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
@InterfaceAudience.Private
public final class ReadOnlyZKClient implements Closeable {

private static final Log LOG = LogFactory.getLog(ReadOnlyZKClient.class);
private static final Logger LOG = LoggerFactory.getLogger(ReadOnlyZKClient.class);

public static final String RECOVERY_RETRY = "zookeeper.recovery.retry";

@@ -26,8 +26,6 @@ import java.io.IOException;
import java.util.Arrays;
import java.util.regex.Pattern;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.testclassification.MiscTests;

@@ -38,6 +36,8 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Test setting values in the descriptor

@@ -45,7 +45,7 @@ import org.junit.rules.TestName;
@Category({MiscTests.class, SmallTests.class})
@Deprecated
public class TestHTableDescriptor {
private static final Log LOG = LogFactory.getLog(TestHTableDescriptor.class);
private static final Logger LOG = LoggerFactory.getLogger(TestHTableDescriptor.class);

@Rule
public TestName name = new TestName();

@@ -25,10 +25,9 @@ import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.Set;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
import org.apache.hadoop.hbase.testclassification.SmallTests;

@@ -43,6 +42,8 @@ import org.junit.Assert;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Test cases for ensuring our client visible classes have annotations for

@@ -72,7 +73,7 @@ import org.junit.experimental.categories.Category;
public class TestInterfaceAudienceAnnotations {

private static final String HBASE_PROTOBUF = "org.apache.hadoop.hbase.protobuf.generated";
private static final Log LOG = LogFactory.getLog(TestInterfaceAudienceAnnotations.class);
private static final Logger LOG = LoggerFactory.getLogger(TestInterfaceAudienceAnnotations.class);

/** Selects classes with generated in their package name */
static class GeneratedClassFilter implements ClassFinder.ClassFilter {

@@ -315,7 +316,7 @@ public class TestInterfaceAudienceAnnotations {
if (!classes.isEmpty()) {
LOG.info("These are the classes that DO NOT have @InterfaceAudience annotation:");
for (Class<?> clazz : classes) {
LOG.info(clazz);
LOG.info(Objects.toString(clazz));
}
}

@@ -358,7 +359,7 @@ public class TestInterfaceAudienceAnnotations {
LOG.info("These are the @InterfaceAudience.Public classes that have @InterfaceStability " +
"annotation:");
for (Class<?> clazz : classes) {
LOG.info(clazz);
LOG.info(Objects.toString(clazz));
}
}

@@ -403,7 +404,7 @@ public class TestInterfaceAudienceAnnotations {
LOG.info("These are the @InterfaceAudience.LimitedPrivate classes that DO NOT " +
"have @InterfaceStability annotation:");
for (Class<?> clazz : classes) {
LOG.info(clazz);
LOG.info(Objects.toString(clazz));
}
}
Assert.assertEquals("All classes that are marked with @InterfaceAudience.LimitedPrivate " +
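Unlike commons-logging, the slf4j info(...) overloads take a String (plus optional arguments), so the LOG.info(clazz) calls above no longer compile; the patch wraps the argument in java.util.Objects.toString, which also tolerates nulls. A small illustrative sketch of the same idea outside the test (class and method names are hypothetical):

import java.util.Objects;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

class AnnotationReport {
  private static final Logger LOG = LoggerFactory.getLogger(AnnotationReport.class);

  void print(Iterable<Class<?>> classes) {
    for (Class<?> clazz : classes) {
      // Objects.toString(null) yields "null" instead of throwing,
      // matching what commons-logging printed for arbitrary objects.
      LOG.info(Objects.toString(clazz));
      // An equivalent choice would be the parameterized form: LOG.info("{}", clazz);
    }
  }
}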
@@ -50,8 +50,6 @@ import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CallQueueTooBigException;
import org.apache.hadoop.hbase.CategoryBasedTimeout;

@@ -80,12 +78,14 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestRule;
import org.mockito.Mockito;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@Category({ClientTests.class, MediumTests.class})
public class TestAsyncProcess {
@Rule public final TestRule timeout = CategoryBasedTimeout.builder().withTimeout(this.getClass()).
withLookingForStuckThread(true).build();
private static final Log LOG = LogFactory.getLog(TestAsyncProcess.class);
private static final Logger LOG = LoggerFactory.getLogger(TestAsyncProcess.class);
private static final TableName DUMMY_TABLE =
TableName.valueOf("DUMMY_TABLE");
private static final byte[] DUMMY_BYTES_1 = "DUMMY_BYTES_1".getBytes(StandardCharsets.UTF_8);

@@ -25,6 +25,7 @@ import java.net.SocketTimeoutException;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import java.util.Random;
import java.util.SortedMap;
import java.util.concurrent.CompletableFuture;

@@ -35,8 +36,6 @@ import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;

import org.apache.commons.lang3.NotImplementedException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.hbase.CellComparatorImpl;

@@ -65,7 +64,8 @@ import org.junit.Ignore;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.mockito.Mockito;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Stopwatch;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController;

@@ -102,7 +102,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpeci
 */
@Category({ClientTests.class, SmallTests.class})
public class TestClientNoCluster extends Configured implements Tool {
private static final Log LOG = LogFactory.getLog(TestClientNoCluster.class);
private static final Logger LOG = LoggerFactory.getLogger(TestClientNoCluster.class);
private Configuration conf;
public static final ServerName META_SERVERNAME =
ServerName.valueOf("meta.example.org", 16010, 12345);

@@ -234,7 +234,7 @@ public class TestClientNoCluster extends Configured implements Tool {
try {
Result result = null;
while ((result = scanner.next()) != null) {
LOG.info(result);
LOG.info(Objects.toString(result));
}
} finally {
scanner.close();

@@ -256,7 +256,7 @@ public class TestClientNoCluster extends Configured implements Tool {
try {
Result result = null;
while ((result = scanner.next()) != null) {
LOG.info(result);
LOG.info(Objects.toString(result));
}
} finally {
scanner.close();
@@ -28,18 +28,18 @@ import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Abortable;
import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@Category({ ClientTests.class, SmallTests.class })
public class TestInterfaceAlign {

private static final Log LOG = LogFactory.getLog(TestInterfaceAlign.class);
private static final Logger LOG = LoggerFactory.getLogger(TestInterfaceAlign.class);

/**
 * Test methods name match up

@@ -22,8 +22,6 @@ import static org.junit.Assert.fail;

import java.io.IOException;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;

@@ -39,7 +37,8 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.mockito.Mockito;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController;

/**

@@ -48,7 +47,7 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController;
@Category({SmallTests.class, ClientTests.class})
public class TestSnapshotFromAdmin {

private static final Log LOG = LogFactory.getLog(TestSnapshotFromAdmin.class);
private static final Logger LOG = LoggerFactory.getLogger(TestSnapshotFromAdmin.class);

@Rule
public TestName name = new TestName();

@@ -26,8 +26,6 @@ import static org.junit.Assert.fail;
import java.io.IOException;
import java.util.regex.Pattern;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;

@@ -37,13 +35,15 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Test setting values in the descriptor
 */
@Category({MiscTests.class, SmallTests.class})
public class TestTableDescriptorBuilder {
private static final Log LOG = LogFactory.getLog(TestTableDescriptorBuilder.class);
private static final Logger LOG = LoggerFactory.getLogger(TestTableDescriptorBuilder.class);

@Rule
public TestName name = new TestName();

@@ -24,15 +24,12 @@ import java.nio.ByteBuffer;
import java.util.Arrays;

import org.apache.commons.lang3.time.StopWatch;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.commons.logging.impl.Log4JLogger;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.PrivateCellUtil;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.PrivateCellUtil;
import org.apache.hadoop.hbase.codec.Codec;
import org.apache.hadoop.hbase.codec.KeyValueCodec;
import org.apache.hadoop.hbase.io.SizedCellScanner;

@@ -44,15 +41,16 @@ import org.apache.hadoop.hbase.util.ClassSize;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.DefaultCodec;
import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.log4j.Level;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@Category({ ClientTests.class, SmallTests.class })
public class TestCellBlockBuilder {

private static final Log LOG = LogFactory.getLog(TestCellBlockBuilder.class);
private static final Logger LOG = LoggerFactory.getLogger(TestCellBlockBuilder.class);

private CellBlockBuilder builder;

@@ -190,7 +188,6 @@ public class TestCellBlockBuilder {
}
}
CellBlockBuilder builder = new CellBlockBuilder(HBaseConfiguration.create());
((Log4JLogger) CellBlockBuilder.LOG).getLogger().setLevel(Level.ALL);
timerTests(builder, count, size, new KeyValueCodec(), null);
timerTests(builder, count, size, new KeyValueCodec(), new DefaultCodec());
timerTests(builder, count, size, new KeyValueCodec(), new GzipCodec());
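The removed line above is the one place in these tests that reached through commons-logging's Log4JLogger wrapper to turn the logger up to ALL; with the field now typed as an slf4j Logger that cast no longer compiles, and slf4j itself exposes no level-setting API. If a test still needed that behaviour, one assumption-laden alternative (valid only while log4j 1.2 remains the bound backend) would be to call log4j directly; the helper below is hypothetical, not part of the patch:

import org.apache.log4j.Level;
import org.apache.log4j.Logger;

// Hypothetical test helper: raises the log4j 1.2 level for a class under test.
final class TestLogLevels {
  private TestLogLevels() {}

  static void enableAllFor(Class<?> clazz) {
    // Goes straight to the log4j 1.2 API rather than through slf4j,
    // so it only works while slf4j-log4j12 binds slf4j to log4j.
    Logger.getLogger(clazz).setLevel(Level.ALL);
  }
}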
@@ -201,8 +201,8 @@
<artifactId>hbase-shaded-miscellaneous</artifactId>
</dependency>
<dependency>
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
</dependency>
<dependency>
<groupId>commons-codec</groupId>
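The pom change above mirrors the source changes: this module now compiles against slf4j-api instead of commons-logging. slf4j-api is only the facade, so some binding (for example slf4j-log4j12 for a log4j 1.2 backend) still has to be on the runtime classpath, otherwise slf4j falls back to a no-op logger. A small, illustrative sketch of code that stays agnostic to whichever backend is bound:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public final class FacadeOnlyExample {
  private static final Logger LOG = LoggerFactory.getLogger(FacadeOnlyExample.class);

  public static void main(String[] args) {
    // Which appender and layout handle this line is decided entirely by the binding
    // discovered on the classpath, so swapping backends needs no change here.
    LOG.info("slf4j facade initialised; backend chosen at runtime");
  }
}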
@ -21,14 +21,14 @@ package org.apache.hadoop.hbase;
|
|||
import java.io.IOException;
|
||||
import java.net.UnknownHostException;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.hbase.security.UserProvider;
|
||||
import org.apache.hadoop.hbase.util.DNS;
|
||||
import org.apache.hadoop.hbase.util.Strings;
|
||||
import org.apache.hadoop.security.UserGroupInformation;
|
||||
import org.apache.yetus.audience.InterfaceAudience;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
/**
|
||||
* Utility methods for helping with security tasks. Downstream users
|
||||
|
@ -68,7 +68,7 @@ import org.apache.yetus.audience.InterfaceAudience;
|
|||
*/
|
||||
@InterfaceAudience.Public
|
||||
public class AuthUtil {
|
||||
private static final Log LOG = LogFactory.getLog(AuthUtil.class);
|
||||
private static final Logger LOG = LoggerFactory.getLogger(AuthUtil.class);
|
||||
|
||||
/** Prefix character to denote group names */
|
||||
private static final String GROUP_PREFIX = "@";
|
||||
|
|
|
@ -18,13 +18,13 @@
|
|||
|
||||
package org.apache.hadoop.hbase;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.apache.hadoop.hbase.KeyValue.Type;
|
||||
import org.apache.hadoop.hbase.util.ByteBufferUtils;
|
||||
import org.apache.hadoop.hbase.util.Bytes;
|
||||
import org.apache.yetus.audience.InterfaceAudience;
|
||||
import org.apache.yetus.audience.InterfaceStability;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.primitives.Longs;
|
||||
|
||||
|
@ -44,7 +44,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.primitives.Longs;
|
|||
@InterfaceAudience.Private
|
||||
@InterfaceStability.Evolving
|
||||
public class CellComparatorImpl implements CellComparator {
|
||||
static final Log LOG = LogFactory.getLog(CellComparatorImpl.class);
|
||||
static final Logger LOG = LoggerFactory.getLogger(CellComparatorImpl.class);
|
||||
/**
|
||||
* Comparator for plain key/values; i.e. non-catalog table key/values. Works on Key portion
|
||||
* of KeyValue only.
|
||||
|
|
|
@ -27,10 +27,10 @@ import java.util.concurrent.ScheduledThreadPoolExecutor;
|
|||
import java.util.concurrent.ThreadFactory;
|
||||
import java.util.concurrent.atomic.AtomicInteger;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.apache.hadoop.hbase.ScheduledChore.ChoreServicer;
|
||||
import org.apache.yetus.audience.InterfaceAudience;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
|
||||
|
||||
|
@ -55,7 +55,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
|
|||
*/
|
||||
@InterfaceAudience.Public
|
||||
public class ChoreService implements ChoreServicer {
|
||||
private static final Log LOG = LogFactory.getLog(ChoreService.class);
|
||||
private static final Logger LOG = LoggerFactory.getLogger(ChoreService.class);
|
||||
|
||||
/**
|
||||
* The minimum number of threads in the core pool of the underlying ScheduledThreadPoolExecutor
|
||||
|
|
|
@ -22,19 +22,19 @@ import java.lang.reflect.InvocationTargetException;
|
|||
import java.lang.reflect.Method;
|
||||
import java.util.Map;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.hbase.util.VersionInfo;
|
||||
import org.apache.hadoop.hbase.zookeeper.ZKConfig;
|
||||
import org.apache.yetus.audience.InterfaceAudience;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
/**
|
||||
* Adds HBase configuration files to a Configuration
|
||||
*/
|
||||
@InterfaceAudience.Public
|
||||
public class HBaseConfiguration extends Configuration {
|
||||
private static final Log LOG = LogFactory.getLog(HBaseConfiguration.class);
|
||||
private static final Logger LOG = LoggerFactory.getLogger(HBaseConfiguration.class);
|
||||
|
||||
/**
|
||||
* Instantiating HBaseConfiguration() is deprecated. Please use
|
||||
|
|
|
@ -35,15 +35,16 @@ import java.util.List;
import java.util.Map;
import java.util.Optional;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.util.ByteBufferUtils;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ClassSize;
import org.apache.hadoop.io.RawComparator;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;

/**
* An HBase Key/Value. This is the fundamental HBase Type.
* <p>
@ -81,7 +82,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
public class KeyValue implements ExtendedCell {
private static final ArrayList<Tag> EMPTY_ARRAY_LIST = new ArrayList<>();

private static final Log LOG = LogFactory.getLog(KeyValue.class);
private static final Logger LOG = LoggerFactory.getLogger(KeyValue.class);

public static final int FIXED_OVERHEAD = ClassSize.OBJECT + // the KeyValue object itself
ClassSize.REFERENCE + // pointer to "bytes"
@ -738,9 +739,9 @@ public class KeyValue implements ExtendedCell {
}

public KeyValue(Cell c) {
this(c.getRowArray(), c.getRowOffset(), (int)c.getRowLength(),
c.getFamilyArray(), c.getFamilyOffset(), (int)c.getFamilyLength(),
c.getQualifierArray(), c.getQualifierOffset(), (int) c.getQualifierLength(),
this(c.getRowArray(), c.getRowOffset(), c.getRowLength(),
c.getFamilyArray(), c.getFamilyOffset(), c.getFamilyLength(),
c.getQualifierArray(), c.getQualifierOffset(), c.getQualifierLength(),
c.getTimestamp(), Type.codeToType(c.getTypeByte()), c.getValueArray(), c.getValueOffset(),
c.getValueLength(), c.getTagsArray(), c.getTagsOffset(), c.getTagsLength());
this.seqId = c.getSequenceId();

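The KeyValue(Cell) hunk above is not a logging change: it drops explicit (int) casts that appear to be redundant, since the shorter integral types some Cell length accessors return (short/byte) widen to int implicitly in Java. A tiny stand-alone sketch of that rule, with placeholder names rather than the HBase API:

public class WideningDemo {
  static void takeInt(int length) {        // parameter declared as int
    System.out.println("length=" + length);
  }

  public static void main(String[] args) {
    short rowLength = 7;                   // stand-in for a short-valued length accessor
    byte familyLength = 3;                 // stand-in for a byte-valued length accessor
    takeInt(rowLength);                    // implicit short -> int widening
    takeInt((int) rowLength);              // the explicit cast changes nothing
    takeInt(familyLength);                 // implicit byte -> int widening
  }
}
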
@ -21,10 +21,9 @@ package org.apache.hadoop.hbase;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;

/**
@ -42,7 +41,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
*/
@InterfaceAudience.Public
public abstract class ScheduledChore implements Runnable {
private static final Log LOG = LogFactory.getLog(ScheduledChore.class);
private static final Logger LOG = LoggerFactory.getLogger(ScheduledChore.class);

private final String name;

@ -23,18 +23,19 @@ import java.io.InputStream;
import java.io.PushbackInputStream;

import edu.umd.cs.findbugs.annotations.NonNull;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
* TODO javadoc
*/
@InterfaceAudience.LimitedPrivate({HBaseInterfaceAudience.COPROC, HBaseInterfaceAudience.PHOENIX})
public abstract class BaseDecoder implements Codec.Decoder {
protected static final Log LOG = LogFactory.getLog(BaseDecoder.class);
protected static final Logger LOG = LoggerFactory.getLogger(BaseDecoder.class);

protected final InputStream in;
private Cell current = null;

@ -23,10 +23,10 @@ import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.util.ByteBufferUtils;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
* An OutputStream which writes data into ByteBuffers. It will try to get ByteBuffer, as and when
@ -37,7 +37,7 @@ import org.apache.yetus.audience.InterfaceAudience;
*/
@InterfaceAudience.Private
public class ByteBufferListOutputStream extends ByteBufferOutputStream {
private static final Log LOG = LogFactory.getLog(ByteBufferListOutputStream.class);
private static final Logger LOG = LoggerFactory.getLogger(ByteBufferListOutputStream.class);

private ByteBufferPool pool;
// Keep track of the BBs where bytes written to. We will first try to get a BB from the pool. If
@ -115,7 +115,7 @@ public class ByteBufferListOutputStream extends ByteBufferOutputStream {
try {
close();
} catch (IOException e) {
LOG.debug(e);
LOG.debug(e.toString(), e);
}
// Return back all the BBs to pool
if (this.bufsFromPool != null) {

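commons-logging's Log.debug(Object) happily accepted a bare Throwable as the message object, but slf4j has no debug(Throwable) overload, so call sites like the one above now pass the message and the cause separately, keeping the stack trace in the output. A minimal sketch of the pattern (the IOException here is fabricated for illustration):

import java.io.IOException;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class ThrowableLoggingDemo {
  private static final Logger LOG = LoggerFactory.getLogger(ThrowableLoggingDemo.class);

  public static void main(String[] args) {
    IOException e = new IOException("close failed"); // stand-in for a caught exception
    // Old commons-logging style: LOG.debug(e);  -- no such overload on slf4j
    // New style: message plus throwable, so the stack trace is still logged
    LOG.debug(e.toString(), e);
  }
}
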
@ -22,9 +22,9 @@ import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.atomic.AtomicInteger;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;

@ -45,7 +45,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
*/
@InterfaceAudience.Private
public class ByteBufferPool {
private static final Log LOG = LogFactory.getLog(ByteBufferPool.class);
private static final Logger LOG = LoggerFactory.getLogger(ByteBufferPool.class);
// TODO better config names?
// hbase.ipc.server.reservoir.initial.max -> hbase.ipc.server.reservoir.max.buffer.count
// hbase.ipc.server.reservoir.initial.buffer.size -> hbase.ipc.server.reservoir.buffer.size

@ -23,8 +23,6 @@ import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.IOUtils;
@ -39,6 +37,8 @@ import org.apache.hadoop.io.compress.DoNotPool;
import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
* Compression related stuff.
@ -46,7 +46,7 @@ import org.apache.yetus.audience.InterfaceAudience;
*/
@InterfaceAudience.Private
public final class Compression {
private static final Log LOG = LogFactory.getLog(Compression.class);
private static final Logger LOG = LoggerFactory.getLogger(Compression.class);

/**
* Prevent the instantiation of class.

@ -22,14 +22,14 @@ import java.io.OutputStream;
import java.util.Arrays;
import java.util.zip.GZIPOutputStream;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.util.JVM;
import org.apache.hadoop.io.compress.CompressionOutputStream;
import org.apache.hadoop.io.compress.CompressorStream;
import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.hadoop.io.compress.zlib.ZlibFactory;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
* Fixes an inefficiency in Hadoop's Gzip codec, allowing to reuse compression
@ -38,7 +38,7 @@ import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.Private
public class ReusableStreamGzipCodec extends GzipCodec {

private static final Log LOG = LogFactory.getLog(Compression.class);
private static final Logger LOG = LoggerFactory.getLogger(Compression.class);

/**
* A bridge that wraps around a DeflaterOutputStream to make it a
@ -70,7 +70,7 @@ public class ReusableStreamGzipCodec extends GzipCodec {
try {
gzipStream.close();
} catch (IOException e) {
LOG.error(e);
LOG.error(e.toString(), e);
}
}
}

@ -27,13 +27,12 @@ import java.security.spec.InvalidKeySpecException;
import java.util.Arrays;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

import javax.crypto.SecretKeyFactory;
import javax.crypto.spec.PBEKeySpec;
import javax.crypto.spec.SecretKeySpec;

import org.apache.commons.io.IOUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
@ -41,6 +40,8 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
* A facade for encryption algorithms and related support.
@ -48,7 +49,7 @@ import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.Public
public final class Encryption {

private static final Log LOG = LogFactory.getLog(Encryption.class);
private static final Logger LOG = LoggerFactory.getLogger(Encryption.class);

/**
* Crypto context
@ -420,7 +421,7 @@ public final class Encryption {
*/
public static Key getSecretKeyForSubject(String subject, Configuration conf)
throws IOException {
KeyProvider provider = (KeyProvider)getKeyProvider(conf);
KeyProvider provider = getKeyProvider(conf);
if (provider != null) try {
Key[] keys = provider.getKeys(new String[] { subject });
if (keys != null && keys.length > 0) {

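The last hunk above is a small cleanup rather than a logging change: if getKeyProvider(conf) is already declared to return KeyProvider, as the new line implies, the (KeyProvider) cast adds nothing. A hypothetical stand-alone illustration with placeholder names, not the HBase API:

public class RedundantCastDemo {
  static String provider() {           // declared return type already matches
    return "demo-provider";
  }

  public static void main(String[] args) {
    String a = (String) provider();    // compiles, but the cast is redundant
    String b = provider();             // equivalent and cleaner
    System.out.println(a.equals(b));
  }
}
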
@ -23,10 +23,9 @@ import java.io.OutputStream;
import java.security.GeneralSecurityException;
import java.security.Key;
import java.security.SecureRandom;

import javax.crypto.spec.SecretKeySpec;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.io.crypto.Cipher;
import org.apache.hadoop.hbase.io.crypto.CipherProvider;
import org.apache.hadoop.hbase.io.crypto.Context;
@ -34,6 +33,8 @@ import org.apache.hadoop.hbase.io.crypto.Decryptor;
import org.apache.hadoop.hbase.io.crypto.Encryptor;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
@ -48,7 +49,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
@InterfaceStability.Evolving
public class AES extends Cipher {

private static final Log LOG = LogFactory.getLog(AES.class);
private static final Logger LOG = LoggerFactory.getLogger(AES.class);

public static final String CIPHER_MODE_KEY = "hbase.crypto.algorithm.aes.mode";
public static final String CIPHER_PROVIDER_KEY = "hbase.crypto.algorithm.aes.provider";

@ -24,11 +24,10 @@ import java.security.GeneralSecurityException;
import java.security.Key;
import java.security.SecureRandom;
import java.util.Properties;

import javax.crypto.spec.SecretKeySpec;

import org.apache.commons.crypto.cipher.CryptoCipherFactory;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.io.crypto.Cipher;
import org.apache.hadoop.hbase.io.crypto.CipherProvider;
@ -37,6 +36,8 @@ import org.apache.hadoop.hbase.io.crypto.Decryptor;
import org.apache.hadoop.hbase.io.crypto.Encryptor;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
@ -45,7 +46,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
@InterfaceStability.Evolving
public class CommonsCryptoAES extends Cipher {

private static final Log LOG = LogFactory.getLog(CommonsCryptoAES.class);
private static final Logger LOG = LoggerFactory.getLogger(CommonsCryptoAES.class);

public static final String CIPHER_MODE_KEY = "hbase.crypto.commons.mode";
public static final String CIPHER_CLASSES_KEY = "hbase.crypto.commons.cipher.classes";

@ -13,16 +13,16 @@ package org.apache.hadoop.hbase.io.encoding;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.io.ByteArrayOutputStream;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@InterfaceAudience.Private
public class RowIndexEncoderV1 {
private static final Log LOG = LogFactory.getLog(RowIndexEncoderV1.class);
private static final Logger LOG = LoggerFactory.getLogger(RowIndexEncoderV1.class);

/** The Cell previously appended. */
private Cell lastCell = null;

@ -0,0 +1,31 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.hadoop.hbase.log;

import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Marker;
import org.slf4j.MarkerFactory;

@InterfaceAudience.Private
public class HBaseMarkers {
public static final Marker FATAL = MarkerFactory.getMarker("FATAL");

private HBaseMarkers() {
}
}

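The slf4j Logger interface has no fatal level, so this new marker gives call sites a way to keep flagging fatal conditions that the log4j layout and filters can still recognize. A minimal usage sketch, assuming slf4j-api on the classpath (the demo class, message, and exception are illustrative):

import org.apache.hadoop.hbase.log.HBaseMarkers;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class FatalMarkerDemo {
  private static final Logger LOG = LoggerFactory.getLogger(FatalMarkerDemo.class);

  public static void main(String[] args) {
    Throwable cause = new IllegalStateException("aborting"); // stand-in for a real failure
    // slf4j has no LOG.fatal(); an error logged with the FATAL marker serves the same purpose
    LOG.error(HBaseMarkers.FATAL, "Unrecoverable error", cause);
  }
}
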
@ -23,11 +23,11 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.AuthUtil;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
* Keeps lists of superusers and super groups loaded from HBase configuration,
@ -35,7 +35,7 @@ import org.apache.yetus.audience.InterfaceAudience;
*/
@InterfaceAudience.Private
public final class Superusers {
private static final Log LOG = LogFactory.getLog(Superusers.class);
private static final Logger LOG = LoggerFactory.getLogger(Superusers.class);

/** Configuration key for superusers */
public static final String SUPERUSER_CONF_KEY = "hbase.superuser"; // Not getting a name

Some files were not shown because too many files have changed in this diff.