HBASE-20439 Clean up incorrect use of commons-logging in hbase-server

Signed-off-by: Umesh Agashe <uagashe@cloudera.com>
Signed-off-by: Yu Li <liyu@apache.org>

 Conflicts:
	hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.java
	hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/RegionSizeReportingChore.java
	hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/RegionSizeStoreImpl.java
 Additions:
	hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellBasedHFileOutputFormat2.java
	hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellBasedImportExport2.java
Sean Busbey 2018-04-17 14:40:25 -05:00
parent af4dd3eb3c
commit 944ecc729c
9 changed files with 36 additions and 27 deletions
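
Every file in this commit makes the same change: the commons-logging Log/LogFactory pair is removed and replaced with the slf4j Logger/LoggerFactory equivalents. A minimal sketch of the resulting pattern, with SomeClass as a placeholder name rather than one of the files touched here:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class SomeClass {
      // was: private static final Log LOG = LogFactory.getLog(SomeClass.class);
      private static final Logger LOG = LoggerFactory.getLogger(SomeClass.class);

      void doWork() {
        // slf4j supports parameterized messages, so no string concatenation
        // happens when the log level is disabled
        LOG.debug("processed {} cells", 42);
      }
    }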


@@ -39,8 +39,6 @@ import java.util.Set;
import java.util.concurrent.Callable;
import java.util.stream.Collectors;
import java.util.stream.Stream;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -108,6 +106,8 @@ import org.junit.Ignore;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.mockito.Mockito;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Simple test for {@link HFileOutputFormat2}.
@@ -132,7 +132,7 @@ public class TestCellBasedHFileOutputFormat2 {
private HBaseTestingUtility util = new HBaseTestingUtility();
-private static final Log LOG = LogFactory.getLog(TestCellBasedHFileOutputFormat2.class);
+private static final Logger LOG = LoggerFactory.getLogger(TestCellBasedHFileOutputFormat2.class);
/**
* Simple mapper that makes KeyValue output.


@@ -34,8 +34,6 @@ import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -89,6 +87,8 @@ import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Tests the table import and table export MR job functionality
@@ -100,7 +100,7 @@ public class TestCellBasedImportExport2 {
public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(TestCellBasedImportExport2.class);
-private static final Log LOG = LogFactory.getLog(TestCellBasedImportExport2.class);
+private static final Logger LOG = LoggerFactory.getLogger(TestCellBasedImportExport2.class);
protected static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
private static final byte[] ROW1 = Bytes.toBytesBinary("\\x32row1");
private static final byte[] ROW2 = Bytes.toBytesBinary("\\x32row2");


@@ -25,8 +25,6 @@ import java.util.Map;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.Server;
import org.apache.hadoop.hbase.util.Threads;
@@ -37,6 +35,9 @@ import org.apache.yetus.audience.InterfaceAudience;
import org.apache.zookeeper.CreateMode;
import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Tracks the target znode(s) on server ZK cluster and synchronize them to client ZK cluster if
* changed
@@ -45,7 +46,7 @@ import org.apache.zookeeper.KeeperException;
*/
@InterfaceAudience.Private
public abstract class ClientZKSyncer extends ZKListener {
-private static final Log LOG = LogFactory.getLog(ClientZKSyncer.class);
+private static final Logger LOG = LoggerFactory.getLogger(ClientZKSyncer.class);
private final Server server;
private final ZKWatcher clientZkWatcher;
// We use queues and daemon threads to synchronize the data to client ZK cluster


@@ -21,8 +21,6 @@ import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentSkipListMap;
import java.util.concurrent.atomic.AtomicInteger;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.RegionTooBusyException;
@@ -31,7 +29,12 @@ import org.apache.hadoop.hbase.regionserver.Store;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ClassSize;
import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
/**
* StoreHotnessProtector is designed to help limit the concurrency of puts with dense columns, it
* does best-effort to avoid exhausting all RS's handlers. When a lot of clients write requests with
@@ -60,7 +63,7 @@ import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
*/
@InterfaceAudience.Private
public class StoreHotnessProtector {
-private static final Log LOG = LogFactory.getLog(StoreHotnessProtector.class);
+private static final Logger LOG = LoggerFactory.getLogger(StoreHotnessProtector.class);
private volatile int parallelPutToStoreThreadLimit;
private volatile int parallelPreparePutToStoreThreadLimit;


@@ -22,8 +22,6 @@ import static org.junit.Assert.assertTrue;
import java.net.BindException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.testclassification.MediumTests;
@@ -31,6 +29,9 @@ import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category(MediumTests.class)
public class TestClusterPortAssignment {
@ClassRule
@@ -38,7 +39,7 @@ public class TestClusterPortAssignment {
HBaseClassTestRule.forClass(TestClusterPortAssignment.class);
private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
-private static final Log LOG = LogFactory.getLog(TestClusterPortAssignment.class);
+private static final Logger LOG = LoggerFactory.getLogger(TestClusterPortAssignment.class);
/**
* Check that we can start an HBase cluster specifying a custom set of


@@ -25,8 +25,6 @@ import java.util.Arrays;
import java.util.List;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.TableName;
@@ -47,6 +45,9 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({MediumTests.class, ClientTests.class})
public class TestFlushFromClient {
@@ -54,7 +55,7 @@ public class TestFlushFromClient {
public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(TestFlushFromClient.class);
-private static final Log LOG = LogFactory.getLog(TestFlushFromClient.class);
+private static final Logger LOG = LoggerFactory.getLogger(TestFlushFromClient.class);
private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private static AsyncConnection asyncConn;
private static final byte[][] SPLITS = new byte[][]{Bytes.toBytes("3"), Bytes.toBytes("7")};


@@ -20,8 +20,6 @@ package org.apache.hadoop.hbase.client;
import java.io.File;
import org.apache.commons.io.FileUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
@@ -44,9 +42,12 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category(MediumTests.class)
public class TestSeparateClientZKCluster {
-private static final Log LOG = LogFactory.getLog(TestSeparateClientZKCluster.class);
+private static final Logger LOG = LoggerFactory.getLogger(TestSeparateClientZKCluster.class);
private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private static final File clientZkDir = new File("/tmp/TestSeparateClientZKCluster");
private static final int ZK_SESSION_TIMEOUT = 5000;


@@ -23,8 +23,6 @@ import static org.junit.Assert.fail;
import java.io.IOException;
import java.util.List;
import java.util.Optional;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -45,6 +43,9 @@ import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos;
/**
@@ -57,7 +58,7 @@ public class TestFailedProcCleanup {
public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(TestFailedProcCleanup.class);
-private static final Log LOG = LogFactory.getLog(TestFailedProcCleanup.class);
+private static final Logger LOG = LoggerFactory.getLogger(TestFailedProcCleanup.class);
protected static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private static Configuration conf;


@@ -22,8 +22,6 @@ import static org.junit.Assert.fail;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -45,6 +43,9 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Category({ RegionServerTests.class, MediumTests.class })
public class TestDisabledWAL {
@@ -55,7 +56,7 @@ public class TestDisabledWAL {
@Rule
public TestName name = new TestName();
-private static final Log LOG = LogFactory.getLog(TestDisabledWAL.class);
+private static final Logger LOG = LoggerFactory.getLogger(TestDisabledWAL.class);
static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private Table table;
private TableName tableName;