HBASE-26081 Copy HBTU to hbase-testing-util, rename the HBTU related classes in hbase-server and mark them as IA.LimitedPrivate (#3478)

Signed-off-by: Michael Stack <stack@apache.org>
This commit is contained in:
Duo Zhang 2021-07-19 09:29:08 +08:00 committed by GitHub
parent 83d1bf1667
commit d30cc27097
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
933 changed files with 9698 additions and 3121 deletions

View File

@@ -21,7 +21,7 @@ import static org.junit.Assert.assertEquals;
import java.io.IOException;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Get;
@@ -46,8 +46,8 @@ public class TestHelloHBase {
public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(TestHelloHBase.class);
private static final HBaseTestingUtility TEST_UTIL
= new HBaseTestingUtility();
private static final HBaseTestingUtil TEST_UTIL
= new HBaseTestingUtil();
@BeforeClass
public static void beforeClass() throws Exception {

View File

@@ -21,7 +21,7 @@ import static org.junit.Assert.assertEquals;
import java.io.IOException;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Get;
@@ -46,8 +46,8 @@ public class TestHelloHBase {
public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(TestHelloHBase.class);
private static final HBaseTestingUtility TEST_UTIL
= new HBaseTestingUtility();
private static final HBaseTestingUtil TEST_UTIL
= new HBaseTestingUtil();
@BeforeClass
public static void beforeClass() throws Exception {

View File

@ -21,7 +21,7 @@ import java.io.File;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
import org.apache.hadoop.hbase.HBaseCommonTestingUtil;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -30,7 +30,7 @@ public abstract class AsyncFSTestBase {
private static final Logger LOG = LoggerFactory.getLogger(AsyncFSTestBase.class);
protected static final HBaseCommonTestingUtility UTIL = new HBaseCommonTestingUtility();
protected static final HBaseCommonTestingUtil UTIL = new HBaseCommonTestingUtil();
protected static File CLUSTER_TEST_DIR;
@ -49,7 +49,7 @@ public abstract class AsyncFSTestBase {
// Using randomUUID ensures that multiple clusters can be launched by
// a same test, if it stops & starts them
Path testDir =
UTIL.getDataTestDir("cluster_" + HBaseCommonTestingUtility.getRandomUUID().toString());
UTIL.getDataTestDir("cluster_" + HBaseCommonTestingUtil.getRandomUUID().toString());
CLUSTER_TEST_DIR = new File(testDir.toString()).getAbsoluteFile();
// Have it cleaned up on exit
boolean b = deleteOnExit();

View File

@ -22,7 +22,7 @@ import java.util.concurrent.ExecutionException;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
import org.apache.hadoop.hbase.HBaseCommonTestingUtil;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.CommonFSUtils;
@ -47,7 +47,7 @@ public class TestLocalAsyncOutput {
private static Class<? extends Channel> CHANNEL_CLASS = NioSocketChannel.class;
private static final HBaseCommonTestingUtility TEST_UTIL = new HBaseCommonTestingUtility();
private static final HBaseCommonTestingUtil TEST_UTIL = new HBaseCommonTestingUtil();
@AfterClass
public static void tearDownAfterClass() throws IOException {

View File

@ -23,7 +23,7 @@ import java.net.InetAddress;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.hbase.AuthUtil;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
import org.apache.hadoop.hbase.HBaseCommonTestingUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.http.ssl.KeyStoreTestUtil;
import org.apache.hadoop.hdfs.DFSConfigKeys;
@ -159,7 +159,7 @@ public final class HBaseKerberosUtils {
* @param clazz the caller test class.
* @throws Exception if unable to set up SSL configuration
*/
public static void setSSLConfiguration(HBaseCommonTestingUtility utility, Class<?> clazz)
public static void setSSLConfiguration(HBaseCommonTestingUtil utility, Class<?> clazz)
throws Exception {
Configuration conf = utility.getConfiguration();
conf.set(DFSConfigKeys.DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTPS_ONLY.name());

View File

@ -24,7 +24,7 @@ import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
import org.apache.hadoop.hbase.HBaseCommonTestingUtil;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hdfs.DistributedFileSystem;
@ -43,7 +43,7 @@ public class TestRecoverLeaseFSUtils {
public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(TestRecoverLeaseFSUtils.class);
private static final HBaseCommonTestingUtility HTU = new HBaseCommonTestingUtility();
private static final HBaseCommonTestingUtil HTU = new HBaseCommonTestingUtil();
static {
Configuration conf = HTU.getConfiguration();
conf.setInt("hbase.lease.recovery.first.pause", 10);

View File

@ -32,7 +32,7 @@ import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.TableName;
@ -78,8 +78,8 @@ import org.slf4j.LoggerFactory;
public class TestBackupBase {
private static final Logger LOG = LoggerFactory.getLogger(TestBackupBase.class);
protected static HBaseTestingUtility TEST_UTIL;
protected static HBaseTestingUtility TEST_UTIL2;
protected static HBaseTestingUtil TEST_UTIL;
protected static HBaseTestingUtil TEST_UTIL2;
protected static Configuration conf1;
protected static Configuration conf2;
@ -296,7 +296,7 @@ public class TestBackupBase {
if (useSecondCluster) {
conf2 = HBaseConfiguration.create(conf1);
conf2.set(HConstants.ZOOKEEPER_ZNODE_PARENT, "/2");
TEST_UTIL2 = new HBaseTestingUtility(conf2);
TEST_UTIL2 = new HBaseTestingUtil(conf2);
TEST_UTIL2.setZkCluster(TEST_UTIL.getZkCluster());
TEST_UTIL2.startMiniDFSCluster(3);
String root2 = TEST_UTIL2.getConfiguration().get("fs.defaultFS");
@ -329,7 +329,7 @@ public class TestBackupBase {
*/
@BeforeClass
public static void setUp() throws Exception {
TEST_UTIL = new HBaseTestingUtility();
TEST_UTIL = new HBaseTestingUtil();
conf1 = TEST_UTIL.getConfiguration();
autoRestoreOnFailure = true;
useSecondCluster = false;

View File

@ -27,7 +27,7 @@ import java.util.Optional;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.backup.impl.BackupSystemTable;
@ -120,7 +120,7 @@ public class TestBackupDeleteWithFailures extends TestBackupBase{
*/
@BeforeClass
public static void setUp() throws Exception {
TEST_UTIL = new HBaseTestingUtility();
TEST_UTIL = new HBaseTestingUtil();
conf1 = TEST_UTIL.getConfiguration();
conf1.set(CoprocessorHost.MASTER_COPROCESSOR_CONF_KEY,
MasterSnapshotObserver.class.getName());

View File

@ -30,7 +30,7 @@ import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.backup.impl.BackupSystemTable;
import org.apache.hadoop.hbase.client.Connection;
@ -56,7 +56,7 @@ public class TestBackupHFileCleaner {
HBaseClassTestRule.forClass(TestBackupHFileCleaner.class);
private static final Logger LOG = LoggerFactory.getLogger(TestBackupHFileCleaner.class);
private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private final static HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil();
private static Configuration conf = TEST_UTIL.getConfiguration();
private static TableName tableName = TableName.valueOf("backup.hfile.cleaner");
private static String famName = "fam";

View File

@ -26,8 +26,8 @@ import java.util.concurrent.atomic.AtomicLongArray;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.MiniHBaseCluster;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.SingleProcessHBaseCluster;
import org.apache.hadoop.hbase.backup.impl.BackupManager;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.testclassification.MediumTests;
@ -53,9 +53,9 @@ public class TestBackupManager {
public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(TestBackupManager.class);
private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
private static final HBaseTestingUtil UTIL = new HBaseTestingUtil();
protected static Configuration conf = UTIL.getConfiguration();
protected static MiniHBaseCluster cluster;
protected static SingleProcessHBaseCluster cluster;
protected static Connection conn;
protected BackupManager backupManager;

View File

@ -38,8 +38,8 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.MiniHBaseCluster;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.SingleProcessHBaseCluster;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.backup.BackupInfo.BackupState;
import org.apache.hadoop.hbase.backup.impl.BackupManager;
@ -66,9 +66,9 @@ public class TestBackupSystemTable {
public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(TestBackupSystemTable.class);
private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
private static final HBaseTestingUtil UTIL = new HBaseTestingUtil();
protected static Configuration conf = UTIL.getConfiguration();
protected static MiniHBaseCluster cluster;
protected static SingleProcessHBaseCluster cluster;
protected static Connection conn;
protected BackupSystemTable table;

View File

@ -24,7 +24,7 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.backup.util.BackupUtils;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.security.UserGroupInformation;
@ -42,7 +42,7 @@ public class TestBackupUtils {
HBaseClassTestRule.forClass(TestBackupUtils.class);
private static final Logger LOG = LoggerFactory.getLogger(TestBackupUtils.class);
protected static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
protected static HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil();
protected static Configuration conf = TEST_UTIL.getConfiguration();
@Test

View File

@ -23,8 +23,8 @@ import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.MiniHBaseCluster;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.SingleProcessHBaseCluster;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.backup.impl.BackupAdminImpl;
import org.apache.hadoop.hbase.backup.util.BackupUtils;
@ -103,7 +103,7 @@ public class TestIncrementalBackup extends TestBackupBase {
// #2 - insert some data to table
Table t1 = insertIntoTable(conn, table1, famName, 1, ADD_ROWS);
LOG.debug("writing " + ADD_ROWS + " rows to " + table1);
Assert.assertEquals(HBaseTestingUtility.countRows(t1),
Assert.assertEquals(HBaseTestingUtil.countRows(t1),
NB_ROWS_IN_BATCH + ADD_ROWS + NB_ROWS_FAM3);
LOG.debug("written " + ADD_ROWS + " rows to " + table1);
// additionally, insert rows to MOB cf
@ -111,7 +111,7 @@ public class TestIncrementalBackup extends TestBackupBase {
insertIntoTable(conn, table1, mobName, 3, NB_ROWS_MOB);
LOG.debug("written " + NB_ROWS_MOB + " rows to " + table1 + " to Mob enabled CF");
t1.close();
Assert.assertEquals(HBaseTestingUtility.countRows(t1),
Assert.assertEquals(HBaseTestingUtil.countRows(t1),
NB_ROWS_IN_BATCH + ADD_ROWS + NB_ROWS_MOB);
Table t2 = conn.getTable(table2);
Put p2;
@ -120,11 +120,11 @@ public class TestIncrementalBackup extends TestBackupBase {
p2.addColumn(famName, qualName, Bytes.toBytes("val" + i));
t2.put(p2);
}
Assert.assertEquals(NB_ROWS_IN_BATCH + 5, HBaseTestingUtility.countRows(t2));
Assert.assertEquals(NB_ROWS_IN_BATCH + 5, HBaseTestingUtil.countRows(t2));
t2.close();
LOG.debug("written " + 5 + " rows to " + table2);
// split table1
MiniHBaseCluster cluster = TEST_UTIL.getHBaseCluster();
SingleProcessHBaseCluster cluster = TEST_UTIL.getHBaseCluster();
List<HRegion> regions = cluster.getRegions(table1);
byte[] name = regions.get(0).getRegionInfo().getRegionName();
long startSplitTime = EnvironmentEdgeManager.currentTime();
@ -184,11 +184,11 @@ public class TestIncrementalBackup extends TestBackupBase {
// #6.2 - checking row count of tables for full restore
Table hTable = conn.getTable(table1_restore);
Assert.assertEquals(HBaseTestingUtility.countRows(hTable), NB_ROWS_IN_BATCH + NB_ROWS_FAM3);
Assert.assertEquals(HBaseTestingUtil.countRows(hTable), NB_ROWS_IN_BATCH + NB_ROWS_FAM3);
hTable.close();
hTable = conn.getTable(table2_restore);
Assert.assertEquals(NB_ROWS_IN_BATCH, HBaseTestingUtility.countRows(hTable));
Assert.assertEquals(NB_ROWS_IN_BATCH, HBaseTestingUtil.countRows(hTable));
hTable.close();
// #7 - restore incremental backup for multiple tables, with overwrite
@ -213,7 +213,7 @@ public class TestIncrementalBackup extends TestBackupBase {
hTable.close();
hTable = conn.getTable(table2_restore);
Assert.assertEquals(NB_ROWS_IN_BATCH + 5, HBaseTestingUtility.countRows(hTable));
Assert.assertEquals(NB_ROWS_IN_BATCH + 5, HBaseTestingUtil.countRows(hTable));
hTable.close();
admin.close();
}

View File

@ -22,7 +22,7 @@ import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.util.concurrent.CountDownLatch;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.backup.util.BackupUtils;
@ -64,7 +64,7 @@ public class TestRemoteBackup extends TestBackupBase {
*/
@BeforeClass
public static void setUp() throws Exception {
TEST_UTIL = new HBaseTestingUtility();
TEST_UTIL = new HBaseTestingUtil();
conf1 = TEST_UTIL.getConfiguration();
conf1.setInt(HConstants.REGION_SERVER_HANDLER_COUNT, 10);
useSecondCluster = true;

View File

@ -20,7 +20,7 @@ package org.apache.hadoop.hbase.backup;
import static org.junit.Assert.assertTrue;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.backup.util.BackupUtils;
import org.apache.hadoop.hbase.client.Admin;
@ -48,7 +48,7 @@ public class TestRemoteRestore extends TestBackupBase {
*/
@BeforeClass
public static void setUp() throws Exception {
TEST_UTIL = new HBaseTestingUtility();
TEST_UTIL = new HBaseTestingUtil();
conf1 = TEST_UTIL.getConfiguration();
useSecondCluster = true;
setUpHelper();

View File

@ -25,8 +25,7 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
import org.apache.hadoop.hbase.HBaseCommonTestingUtil;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.ServerName;
@ -57,7 +56,7 @@ public class LoadBalancerPerformanceEvaluation extends AbstractHBaseTool {
private static final Logger LOG =
LoggerFactory.getLogger(LoadBalancerPerformanceEvaluation.class.getName());
protected static final HBaseCommonTestingUtility UTIL = new HBaseCommonTestingUtility();
protected static final HBaseCommonTestingUtil UTIL = new HBaseCommonTestingUtil();
private static final int DEFAULT_NUM_REGIONS = 1000000;
private static Option NUM_REGIONS_OPT = new Option("regions", true,

View File

@ -35,7 +35,7 @@ import java.util.TreeMap;
import java.util.concurrent.ThreadLocalRandom;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
import org.apache.hadoop.hbase.HBaseCommonTestingUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.client.RegionInfo;
@ -61,7 +61,7 @@ public class TestStochasticLoadBalancerHeterogeneousCost extends StochasticBalan
private static final Logger LOG =
LoggerFactory.getLogger(TestStochasticLoadBalancerHeterogeneousCost.class);
private static final double ALLOWED_WINDOW = 1.20;
private static final HBaseCommonTestingUtility HTU = new HBaseCommonTestingUtility();
private static final HBaseCommonTestingUtil HTU = new HBaseCommonTestingUtil();
private static String RULES_FILE;
@BeforeClass

View File

@ -26,7 +26,7 @@ import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
import org.apache.hadoop.hbase.HBaseCommonTestingUtil;
import org.apache.hadoop.hbase.testclassification.MasterTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.junit.Before;
@ -47,7 +47,7 @@ public class TestStochasticLoadBalancerHeterogeneousCostRules extends Stochastic
public TestName name = new TestName();
private HeterogeneousRegionCountCostFunction costFunction;
private static final HBaseCommonTestingUtility HTU = new HBaseCommonTestingUtility();
private static final HBaseCommonTestingUtil HTU = new HBaseCommonTestingUtil();
/**
* Make a file for rules that is inside a temporary test dir named for the method so it doesn't

View File

@ -32,7 +32,7 @@ import java.util.stream.Collectors;
import java.util.stream.IntStream;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
import org.apache.hadoop.hbase.HBaseCommonTestingUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.ipc.RpcClient;
@ -69,7 +69,7 @@ public class TestMasterRegistryHedgedReads {
private static final Logger LOG = LoggerFactory.getLogger(TestMasterRegistryHedgedReads.class);
private static final HBaseCommonTestingUtility UTIL = new HBaseCommonTestingUtility();
private static final HBaseCommonTestingUtil UTIL = new HBaseCommonTestingUtil();
private static final ExecutorService EXECUTOR =
Executors.newCachedThreadPool(new ThreadFactoryBuilder().setDaemon(true).build());

View File

@ -25,7 +25,7 @@ import java.util.Collection;
import org.apache.hadoop.hbase.ByteBufferKeyValue;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
import org.apache.hadoop.hbase.HBaseCommonTestingUtil;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValue.Type;
import org.apache.hadoop.hbase.KeyValueUtil;
@ -54,7 +54,7 @@ public class TestKeyOnlyFilter {
@Parameters
public static Collection<Object[]> parameters() {
return HBaseCommonTestingUtility.BOOLEAN_PARAMETERIZED;
return HBaseCommonTestingUtil.BOOLEAN_PARAMETERIZED;
}
@Test

View File

@@ -0,0 +1,346 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase;
import java.io.File;
import java.io.IOException;
import java.net.ServerSocket;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Random;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.ThreadLocalRandom;
import org.apache.commons.io.FileUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Waiter.Predicate;
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Common helpers for testing HBase that do not depend on specific server/etc. things.
 * <p>
 * Wraps a test {@link Configuration}, manages a per-instance data directory on the local
 * filesystem (under {@link #DEFAULT_BASE_TEST_DIRECTORY} unless overridden via the
 * {@link #BASE_TEST_DIRECTORY_KEY} system property), provides {@link Waiter} convenience
 * wrappers, and hands out random free ports for test daemons.
 */
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.PHOENIX)
@InterfaceStability.Evolving
public class HBaseCommonTestingUtil {
  protected static final Logger LOG = LoggerFactory.getLogger(HBaseCommonTestingUtil.class);

  /**
   * Compression algorithms to use in parameterized JUnit 4 tests
   */
  public static final List<Object[]> COMPRESSION_ALGORITHMS_PARAMETERIZED =
    Arrays.asList(new Object[][] {
      { Compression.Algorithm.NONE },
      { Compression.Algorithm.GZ }
    });

  /**
   * This is for unit tests parameterized with two booleans.
   */
  public static final List<Object[]> BOOLEAN_PARAMETERIZED =
    Arrays.asList(new Object[][] {
      {false},
      {true}
    });

  /**
   * Compression algorithms to use in testing
   */
  public static final Compression.Algorithm[] COMPRESSION_ALGORITHMS = {
    Compression.Algorithm.NONE, Compression.Algorithm.GZ
  };

  // Configuration backing this utility; assigned once at construction and never replaced.
  protected final Configuration conf;

  /** Creates a utility backed by a fresh {@link HBaseConfiguration}. */
  public HBaseCommonTestingUtil() {
    this(null);
  }

  /**
   * @param conf configuration to use; when null a new {@link HBaseConfiguration} is created
   */
  public HBaseCommonTestingUtil(Configuration conf) {
    this.conf = (conf == null ? HBaseConfiguration.create() : conf);
  }

  /**
   * Returns this classes's instance of {@link Configuration}.
   *
   * @return Instance of Configuration.
   */
  public Configuration getConfiguration() {
    return this.conf;
  }

  /**
   * System property key to get base test directory value
   */
  public static final String BASE_TEST_DIRECTORY_KEY =
    "test.build.data.basedirectory";

  /**
   * Default base directory for test output.
   */
  public static final String DEFAULT_BASE_TEST_DIRECTORY = "target/test-data";

  /**
   * Directory where we put the data for this instance of HBaseTestingUtility
   */
  private File dataTestDir = null;

  /**
   * Returns Where to write test data on local filesystem, specific to the test. Useful for tests
   * that do not use a cluster. Creates it if it does not exist already.
   */
  public Path getDataTestDir() {
    // Lazily initialized on first access; later calls reuse the same directory.
    if (this.dataTestDir == null) {
      setupDataTestDir();
    }
    return new Path(this.dataTestDir.getAbsolutePath());
  }

  /**
   * Returns the path to a subdirectory or file named {@code name} under
   * {@link #getDataTestDir()}. Does *NOT* create the directory or file if it does not exist.
   * @param name the name of a subdirectory or file in the test data directory
   */
  public Path getDataTestDir(final String name) {
    return new Path(getDataTestDir(), name);
  }

  /**
   * Sets up a directory for a test to use.
   *
   * @return New directory path, if created; null if the data test dir was already set up.
   */
  protected Path setupDataTestDir() {
    if (this.dataTestDir != null) {
      LOG.warn("Data test dir already setup in " +
        dataTestDir.getAbsolutePath());
      return null;
    }
    Path testPath = getRandomDir();
    this.dataTestDir = new File(testPath.toString()).getAbsoluteFile();
    // Set this property so if mapreduce jobs run, they will use this as their home dir.
    System.setProperty("test.build.dir", this.dataTestDir.toString());

    if (deleteOnExit()) {
      this.dataTestDir.deleteOnExit();
    }

    // Point "hbase.local.dir" in the configuration at a subdir of the new test dir.
    createSubDir("hbase.local.dir", testPath, "hbase-local-dir");

    return testPath;
  }

  /**
   * Returns A dir with a random (uuid) name under the test dir
   * @see #getBaseTestDir()
   */
  public Path getRandomDir() {
    return new Path(getBaseTestDir(), getRandomUUID().toString());
  }

  /**
   * Returns a pseudo-random UUID built from two {@link ThreadLocalRandom} longs. Cheaper
   * than {@link UUID#randomUUID()} (no SecureRandom); the raw longs carry no RFC 4122
   * version/variant bits, which is fine for naming test directories.
   */
  public static UUID getRandomUUID() {
    return new UUID(ThreadLocalRandom.current().nextLong(),
      ThreadLocalRandom.current().nextLong());
  }

  /**
   * Resolves {@code subDirName} under {@code parent}, registers it for delete-on-exit when
   * cleanup is enabled, and stores its absolute path in the configuration under
   * {@code propertyName}. Does not create the directory itself.
   */
  protected void createSubDir(String propertyName, Path parent, String subDirName) {
    Path newPath = new Path(parent, subDirName);
    File newDir = new File(newPath.toString()).getAbsoluteFile();

    if (deleteOnExit()) {
      newDir.deleteOnExit();
    }

    conf.set(propertyName, newDir.getAbsolutePath());
  }

  /**
   * Returns true if we should delete testing dirs on exit. Controlled by the
   * "hbase.testing.preserve.testdir" system property; unset means delete.
   */
  boolean deleteOnExit() {
    String v = System.getProperty("hbase.testing.preserve.testdir");
    // Let default be true, to delete on exit.
    return v == null ? true : !Boolean.parseBoolean(v);
  }

  /**
   * Returns true if we removed the test dirs
   */
  public boolean cleanupTestDir() {
    if (deleteDir(this.dataTestDir)) {
      // Reset so a later getDataTestDir() creates a fresh directory.
      this.dataTestDir = null;
      return true;
    }
    return false;
  }

  /**
   * Returns true if we removed the test dir
   * @param subdir Test subdir name.
   */
  public boolean cleanupTestDir(final String subdir) {
    if (this.dataTestDir == null) {
      return false;
    }
    return deleteDir(new File(this.dataTestDir, subdir));
  }

  /**
   * Returns Where to write test data on local filesystem; usually
   * {@link #DEFAULT_BASE_TEST_DIRECTORY}. Should not be used by the unit tests, hence it's
   * private. Unit test will use a subdirectory of this directory.
   * @see #setupDataTestDir()
   */
  private Path getBaseTestDir() {
    String PathName = System.getProperty(
      BASE_TEST_DIRECTORY_KEY, DEFAULT_BASE_TEST_DIRECTORY);

    return new Path(PathName);
  }

  /**
   * Returns true if we deleted it. Retries up to 30 times on IOException (no pause
   * between attempts). NOTE: also returns true without deleting anything when
   * {@link #deleteOnExit()} is false, i.e. when the test dir is deliberately preserved.
   * @param dir Directory to delete
   */
  boolean deleteDir(final File dir) {
    if (dir == null || !dir.exists()) {
      return true;
    }
    int ntries = 0;
    do {
      ntries += 1;
      try {
        if (deleteOnExit()) {
          FileUtils.deleteDirectory(dir);
        }
        return true;
      } catch (IOException ex) {
        // Swallow and retry; give up after 30 attempts (loop condition below).
        LOG.warn("Failed to delete " + dir.getAbsolutePath());
      } catch (IllegalArgumentException ex) {
        LOG.warn("Failed to delete " + dir.getAbsolutePath(), ex);
      }
    } while (ntries < 30);
    return false;
  }

  /**
   * Wrapper method for {@link Waiter#waitFor(Configuration, long, Predicate)}.
   */
  public <E extends Exception> long waitFor(long timeout, Predicate<E> predicate)
      throws E {
    return Waiter.waitFor(this.conf, timeout, predicate);
  }

  /**
   * Wrapper method for {@link Waiter#waitFor(Configuration, long, long, Predicate)}.
   */
  public <E extends Exception> long waitFor(long timeout, long interval, Predicate<E> predicate)
      throws E {
    return Waiter.waitFor(this.conf, timeout, interval, predicate);
  }

  /**
   * Wrapper method for {@link Waiter#waitFor(Configuration, long, long, boolean, Predicate)}.
   */
  public <E extends Exception> long waitFor(long timeout, long interval,
      boolean failIfTimeout, Predicate<E> predicate) throws E {
    return Waiter.waitFor(this.conf, timeout, interval, failIfTimeout, predicate);
  }

  // Support for Random Port Generation.
  static Random random = new Random();

  // Single JVM-wide allocator so ports handed out by randomFreePort() are not reused
  // within this process.
  private static final PortAllocator portAllocator = new PortAllocator(random);

  /** Returns a random free port claimed from the shared {@link PortAllocator}. */
  public static int randomFreePort() {
    return portAllocator.randomFreePort();
  }

  static class PortAllocator {
    private static final int MIN_RANDOM_PORT = 0xc000; // 49152, start of the dynamic/private port range
    private static final int MAX_RANDOM_PORT = 0xfffe; // 65534

    /** A set of ports that have been claimed using {@link #randomFreePort()}. */
    private final Set<Integer> takenRandomPorts = new HashSet<>();

    private final Random random;
    private final AvailablePortChecker portChecker;

    public PortAllocator(Random random) {
      this.random = random;
      // Default availability check: a port is "available" if we can bind and close a
      // ServerSocket on it. NOTE(review): another process may still grab the port
      // between this check and the test actually binding it.
      this.portChecker = new AvailablePortChecker() {
        @Override
        public boolean available(int port) {
          try {
            ServerSocket sock = new ServerSocket(port);
            sock.close();
            return true;
          } catch (IOException ex) {
            return false;
          }
        }
      };
    }

    /** Constructor that lets callers supply a custom availability check. */
    public PortAllocator(Random random, AvailablePortChecker portChecker) {
      this.random = random;
      this.portChecker = portChecker;
    }

    /**
     * Returns a random free port and marks that port as taken. Not thread-safe. Expected to be
     * called from single-threaded test setup code.
     */
    public int randomFreePort() {
      int port = 0;
      do {
        port = randomPort();
        if (takenRandomPorts.contains(port)) {
          // Already handed out by this allocator; pick another.
          port = 0;
          continue;
        }
        takenRandomPorts.add(port);

        if (!portChecker.available(port)) {
          // Bound by someone else; loop again for a new candidate.
          port = 0;
        }
      } while (port == 0);
      return port;
    }

    /**
     * Returns a random port. These ports cannot be registered with IANA and are
     * intended for dynamic allocation (see http://bit.ly/dynports).
     */
    private int randomPort() {
      // Uniform in [MIN_RANDOM_PORT, MAX_RANDOM_PORT) -- nextInt's bound is exclusive.
      return MIN_RANDOM_PORT
        + random.nextInt(MAX_RANDOM_PORT - MIN_RANDOM_PORT);
    }

    interface AvailablePortChecker {
      boolean available(int port);
    }
  }
}

View File

@ -60,7 +60,7 @@ public class TestClassFinder {
private static final Logger LOG = LoggerFactory.getLogger(TestClassFinder.class);
@Rule public TestName name = new TestName();
private static final HBaseCommonTestingUtility testUtil = new HBaseCommonTestingUtility();
private static final HBaseCommonTestingUtil testUtil = new HBaseCommonTestingUtil();
private static final String BASEPKG = "tfcpkg";
private static final String PREFIX = "Prefix";

View File

@ -48,7 +48,7 @@ public class TestHBaseConfiguration {
private static final Logger LOG = LoggerFactory.getLogger(TestHBaseConfiguration.class);
private static HBaseCommonTestingUtility UTIL = new HBaseCommonTestingUtility();
private static HBaseCommonTestingUtil UTIL = new HBaseCommonTestingUtil();
@AfterClass
public static void tearDown() throws IOException {

View File

@ -29,7 +29,7 @@ import java.security.MessageDigest;
import java.util.Properties;
import javax.crypto.spec.SecretKeySpec;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
import org.apache.hadoop.hbase.HBaseCommonTestingUtil;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
@ -48,7 +48,7 @@ public class TestKeyStoreKeyProvider {
HBaseClassTestRule.forClass(TestKeyStoreKeyProvider.class);
private static final Logger LOG = LoggerFactory.getLogger(TestKeyStoreKeyProvider.class);
static final HBaseCommonTestingUtility TEST_UTIL = new HBaseCommonTestingUtility();
static final HBaseCommonTestingUtil TEST_UTIL = new HBaseCommonTestingUtil();
static final String ALIAS = "test";
static final String PASSWORD = "password";

View File

@ -44,7 +44,7 @@ import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
import org.apache.hadoop.hbase.HBaseCommonTestingUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.MiscTests;
@ -76,7 +76,7 @@ public class TestByteBufferUtils {
@Parameterized.Parameters
public static Collection<Object[]> parameters() {
return HBaseCommonTestingUtility.BOOLEAN_PARAMETERIZED;
return HBaseCommonTestingUtil.BOOLEAN_PARAMETERIZED;
}
private static void setUnsafe(String fieldName, boolean value) throws Exception {

View File

@ -27,7 +27,7 @@ import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
import org.apache.hadoop.hbase.HBaseCommonTestingUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
@ -46,12 +46,12 @@ public class TestCommonFSUtils {
public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(TestCommonFSUtils.class);
private HBaseCommonTestingUtility htu;
private HBaseCommonTestingUtil htu;
private Configuration conf;
@Before
public void setUp() throws IOException {
htu = new HBaseCommonTestingUtility();
htu = new HBaseCommonTestingUtil();
conf = htu.getConfiguration();
}

View File

@ -29,7 +29,7 @@ import java.io.FileOutputStream;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
import org.apache.hadoop.hbase.HBaseCommonTestingUtil;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.io.IOUtils;
@ -46,7 +46,7 @@ public class TestCoprocessorClassLoader {
public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(TestCoprocessorClassLoader.class);
private static final HBaseCommonTestingUtility TEST_UTIL = new HBaseCommonTestingUtility();
private static final HBaseCommonTestingUtil TEST_UTIL = new HBaseCommonTestingUtil();
private static final Configuration conf = TEST_UTIL.getConfiguration();
static {
TEST_UTIL.getDataTestDir(); // prepare data test dir and hbase local dir

View File

@ -23,7 +23,7 @@ import static org.junit.Assert.fail;
import java.io.File;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
import org.apache.hadoop.hbase.HBaseCommonTestingUtil;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.Before;
@ -45,7 +45,7 @@ public class TestDynamicClassLoader {
private static final Logger LOG = LoggerFactory.getLogger(TestDynamicClassLoader.class);
private static final HBaseCommonTestingUtility TEST_UTIL = new HBaseCommonTestingUtility();
private static final HBaseCommonTestingUtil TEST_UTIL = new HBaseCommonTestingUtil();
private Configuration conf;
static {

View File

@ -20,7 +20,7 @@ package org.apache.hadoop.hbase.util;
import java.io.File;
import java.io.IOException;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
import org.apache.hadoop.hbase.HBaseCommonTestingUtil;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.kerby.kerberos.kerb.KrbException;
@ -34,7 +34,7 @@ public class TestSimpleKdcServerUtil {
@ClassRule
public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(TestSimpleKdcServerUtil.class);
private static final HBaseCommonTestingUtility UTIL = new HBaseCommonTestingUtility();
private static final HBaseCommonTestingUtil UTIL = new HBaseCommonTestingUtil();
/**
* Test we are able to ride over clashing port... BindException.. when starting up a
@ -46,7 +46,7 @@ public class TestSimpleKdcServerUtil {
try {
File dir = new File(UTIL.getDataTestDir().toString());
kdc = SimpleKdcServerUtil.
getRunningSimpleKdcServer(dir, HBaseCommonTestingUtility::randomFreePort, true);
getRunningSimpleKdcServer(dir, HBaseCommonTestingUtil::randomFreePort, true);
kdc.createPrincipal("wah");
} finally {
kdc.stop();

View File

@ -24,7 +24,7 @@ import java.util.stream.Collectors;
import java.util.stream.LongStream;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.coprocessor.AsyncAggregationClient;
import org.apache.hadoop.hbase.client.coprocessor.LongColumnInterpreter;
@ -46,7 +46,7 @@ public class TestAsyncAggregationClient {
public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(TestAsyncAggregationClient.class);
private static HBaseTestingUtility UTIL = new HBaseTestingUtility();
private static HBaseTestingUtil UTIL = new HBaseTestingUtil();
private static TableName TABLE_NAME = TableName.valueOf("TestAsyncAggregationClient");

View File

@ -17,7 +17,7 @@
*/
package org.apache.hadoop.hbase.client;
import static org.apache.hadoop.hbase.HBaseTestingUtility.fam1;
import static org.apache.hadoop.hbase.HBaseTestingUtil.fam1;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
@ -29,7 +29,7 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CellScannable;
import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Scan.ReadType;
@ -99,7 +99,7 @@ public class TestRpcControllerFactory {
}
}
private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
private static final HBaseTestingUtil UTIL = new HBaseTestingUtil();
@Rule
public TestName name = new TestName();

View File

@ -25,7 +25,7 @@ import java.util.Map;
import java.util.TreeMap;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
@ -80,7 +80,7 @@ public class TestBatchCoprocessorEndpoint {
private static final int rowSeperator2 = 12;
private static byte[][] ROWS = makeN(ROW, ROWSIZE);
private static HBaseTestingUtility util = new HBaseTestingUtility();
private static HBaseTestingUtil util = new HBaseTestingUtil();
@BeforeClass
public static void setupBeforeClass() throws Exception {

View File

@ -37,11 +37,11 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Coprocessor;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.MiniHBaseCluster;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.RegionMetrics;
import org.apache.hadoop.hbase.ServerMetrics;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.SingleProcessHBaseCluster;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
@ -76,7 +76,7 @@ public class TestClassLoading {
HBaseClassTestRule.forClass(TestClassLoading.class);
private static final Logger LOG = LoggerFactory.getLogger(TestClassLoading.class);
private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private final static HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil();
public static class TestMasterCoprocessor implements MasterCoprocessor, MasterObserver {
@Override
@ -199,7 +199,7 @@ public class TestClassLoading {
boolean foundTableRegion=false;
boolean found1 = true, found2 = true, found2_k1 = true, found2_k2 = true, found2_k3 = true;
Map<Region, Set<ClassLoader>> regionsActiveClassLoaders = new HashMap<>();
MiniHBaseCluster hbase = TEST_UTIL.getHBaseCluster();
SingleProcessHBaseCluster hbase = TEST_UTIL.getHBaseCluster();
for (HRegion region:
hbase.getRegionServer(0).getOnlineRegionsLocalContext()) {
if (region.getRegionInfo().getRegionNameAsString().startsWith(tableName.getNameAsString())) {
@ -272,7 +272,7 @@ public class TestClassLoading {
// verify that the coprocessor was loaded
boolean found = false;
MiniHBaseCluster hbase = TEST_UTIL.getHBaseCluster();
SingleProcessHBaseCluster hbase = TEST_UTIL.getHBaseCluster();
for (HRegion region: hbase.getRegionServer(0).getOnlineRegionsLocalContext()) {
if (region.getRegionInfo().getRegionNameAsString().startsWith(cpName3)) {
found = (region.getCoprocessorHost().findCoprocessor(cpName3) != null);
@ -299,7 +299,7 @@ public class TestClassLoading {
// verify that the coprocessor was loaded correctly
boolean found = false;
MiniHBaseCluster hbase = TEST_UTIL.getHBaseCluster();
SingleProcessHBaseCluster hbase = TEST_UTIL.getHBaseCluster();
for (HRegion region: hbase.getRegionServer(0).getOnlineRegionsLocalContext()) {
if (region.getRegionInfo().getRegionNameAsString().startsWith(cpName4)) {
Coprocessor cp = region.getCoprocessorHost().findCoprocessor(cpName4);
@ -384,7 +384,7 @@ public class TestClassLoading {
boolean found6_k1 = false, found6_k2 = false, found6_k3 = false,
found6_k4 = false;
MiniHBaseCluster hbase = TEST_UTIL.getHBaseCluster();
SingleProcessHBaseCluster hbase = TEST_UTIL.getHBaseCluster();
for (HRegion region: hbase.getRegionServer(0).getOnlineRegionsLocalContext()) {
if (region.getRegionInfo().getRegionNameAsString().startsWith(tableName.getNameAsString())) {
found_1 = found_1 ||
@ -475,7 +475,7 @@ public class TestClassLoading {
// verify that the coprocessors were loaded
boolean found1 = false, found2 = false, found2_k1 = false,
found2_k2 = false, found2_k3 = false;
MiniHBaseCluster hbase = TEST_UTIL.getHBaseCluster();
SingleProcessHBaseCluster hbase = TEST_UTIL.getHBaseCluster();
for (HRegion region: hbase.getRegionServer(0).getOnlineRegionsLocalContext()) {
if (region.getRegionInfo().getRegionNameAsString().startsWith(tableName.getNameAsString())) {
CoprocessorEnvironment env;

View File

@ -30,7 +30,7 @@ import java.util.Map;
import java.util.TreeMap;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.TableName;
@ -87,7 +87,7 @@ public class TestCoprocessorEndpoint {
private static final int rowSeperator2 = 12;
private static byte[][] ROWS = makeN(ROW, ROWSIZE);
private static HBaseTestingUtility util = new HBaseTestingUtility();
private static HBaseTestingUtil util = new HBaseTestingUtil();
@BeforeClass
public static void setupBeforeClass() throws Exception {

View File

@ -22,7 +22,7 @@ import static org.junit.Assert.assertEquals;
import java.io.IOException;
import java.util.Map;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
@ -62,7 +62,7 @@ public class TestCoprocessorTableEndpoint {
private static final int rowSeperator2 = 12;
private static final byte[][] ROWS = makeN(ROW, ROWSIZE);
private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private static final HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil();
@Rule
public TestName name = new TestName();

View File

@ -24,7 +24,7 @@ import java.io.FileNotFoundException;
import java.util.Collections;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
import org.apache.hadoop.hbase.ipc.ServerRpcController;
@ -54,13 +54,13 @@ public class TestRegionServerCoprocessorEndpoint {
HBaseClassTestRule.forClass(TestRegionServerCoprocessorEndpoint.class);
public static final FileNotFoundException WHAT_TO_THROW = new FileNotFoundException("/file.txt");
private static HBaseTestingUtility TEST_UTIL = null;
private static HBaseTestingUtil TEST_UTIL = null;
private static Configuration CONF = null;
private static final String DUMMY_VALUE = "val";
@BeforeClass
public static void setupBeforeClass() throws Exception {
TEST_UTIL = new HBaseTestingUtility();
TEST_UTIL = new HBaseTestingUtil();
CONF = TEST_UTIL.getConfiguration();
CONF.setStrings(CoprocessorHost.REGIONSERVER_COPROCESSOR_CONF_KEY,
DummyRegionServerEndpoint.class.getName());

View File

@ -33,7 +33,7 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Connection;
@ -91,7 +91,7 @@ public class TestSecureExport {
HBaseClassTestRule.forClass(TestSecureExport.class);
private static final Logger LOG = LoggerFactory.getLogger(TestSecureExport.class);
private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
private static final HBaseTestingUtil UTIL = new HBaseTestingUtil();
private static MiniKdc KDC;
private static final File KEYTAB_FILE = new File(UTIL.getDataTestDir("keytab").toUri().getPath());
private static String USERNAME;

View File

@ -26,7 +26,7 @@ import java.util.Collections;
import java.util.Map;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Put;
@ -150,7 +150,7 @@ public class TestServerCustomProtocol {
private static final byte[] ROW_AB = Bytes.toBytes("abb");
private static final byte[] ROW_BC = Bytes.toBytes("bcc");
private static HBaseTestingUtility util = new HBaseTestingUtility();
private static HBaseTestingUtil util = new HBaseTestingUtil();
@BeforeClass
public static void setupBeforeClass() throws Exception {

View File

@ -20,7 +20,7 @@ package org.apache.hadoop.hbase.client.example;
import static org.junit.Assert.assertEquals;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
@ -39,7 +39,7 @@ public class TestAsyncClientExample {
public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(TestAsyncClientExample.class);
private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
private static final HBaseTestingUtil UTIL = new HBaseTestingUtil();
private static final TableName TABLE_NAME = TableName.valueOf("test");

View File

@ -21,7 +21,7 @@ import static org.junit.Assert.assertEquals;
import java.nio.charset.StandardCharsets;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
@ -49,7 +49,7 @@ public class TestHttpProxyExample {
public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(TestHttpProxyExample.class);
private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
private static final HBaseTestingUtil UTIL = new HBaseTestingUtil();
private static final TableName TABLE_NAME = TableName.valueOf("test");

View File

@ -20,9 +20,9 @@ package org.apache.hadoop.hbase.coprocessor.example;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.MiniHBaseCluster;
import org.apache.hadoop.hbase.SingleProcessHBaseCluster;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
@ -37,7 +37,7 @@ import org.slf4j.LoggerFactory;
public class TestRefreshHFilesBase {
protected static final Logger LOG = LoggerFactory.getLogger(TestRefreshHFilesBase.class);
protected static final HBaseTestingUtility HTU = new HBaseTestingUtility();
protected static final HBaseTestingUtil HTU = new HBaseTestingUtil();
protected static final int NUM_RS = 2;
protected static final TableName TABLE_NAME = TableName.valueOf("testRefreshRegionHFilesEP");
protected static final byte[] FAMILY = Bytes.toBytes("family");
@ -47,7 +47,7 @@ public class TestRefreshHFilesBase {
protected static final String HFILE_NAME = "123abcdef";
protected static Configuration CONF = HTU.getConfiguration();
protected static MiniHBaseCluster cluster;
protected static SingleProcessHBaseCluster cluster;
protected static Table table;
public static void setUp(String regionImpl) {

View File

@ -26,7 +26,7 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
@ -52,7 +52,7 @@ public class TestScanModifyingObserver {
public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(TestScanModifyingObserver.class);
private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
private static final HBaseTestingUtil UTIL = new HBaseTestingUtil();
private static final TableName NAME = TableName.valueOf("TestScanModifications");
private static final byte[] FAMILY = Bytes.toBytes("f");
private static final ColumnFamilyDescriptor CFD = ColumnFamilyDescriptorBuilder

View File

@ -26,7 +26,7 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
@ -52,7 +52,7 @@ public class TestValueReplacingCompaction {
public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(TestValueReplacingCompaction.class);
private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
private static final HBaseTestingUtil UTIL = new HBaseTestingUtil();
private static final TableName NAME = TableName.valueOf("TestValueReplacement");
private static final byte[] FAMILY = Bytes.toBytes("f");
private static final byte[] QUALIFIER = Bytes.toBytes("q");

View File

@ -22,7 +22,7 @@ import static org.junit.Assert.assertFalse;
import java.io.IOException;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Get;
@ -50,7 +50,7 @@ public class TestZooKeeperScanPolicyObserver {
public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(TestZooKeeperScanPolicyObserver.class);
private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
private static final HBaseTestingUtil UTIL = new HBaseTestingUtil();
private static TableName NAME = TableName.valueOf("TestCP");

View File

@ -22,8 +22,7 @@ import static org.junit.Assert.assertEquals;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.util.stream.IntStream;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Get;
@ -37,7 +36,7 @@ import org.junit.BeforeClass;
public class WriteHeavyIncrementObserverTestBase {
protected static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
protected static final HBaseTestingUtil UTIL = new HBaseTestingUtil();
protected static TableName NAME = TableName.valueOf("TestCP");

View File

@ -30,7 +30,7 @@ import java.io.PrintStream;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
@ -58,7 +58,7 @@ public class TestMapReduceExamples {
public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(TestMapReduceExamples.class);
private static HBaseTestingUtility util = new HBaseTestingUtility();
private static HBaseTestingUtil util = new HBaseTestingUtil();
/**
* Test SampleUploader from examples

View File

@ -42,7 +42,7 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.LocalHBaseCluster;
import org.apache.hadoop.hbase.TableName;
@ -96,7 +96,7 @@ public class TestShadeSaslAuthenticationProvider {
private static final char[] USER1_PASSWORD = "foobarbaz".toCharArray();
static LocalHBaseCluster createCluster(HBaseTestingUtility util, File keytabFile,
static LocalHBaseCluster createCluster(HBaseTestingUtil util, File keytabFile,
MiniKdc kdc, Map<String,char[]> userDatabase) throws Exception {
String servicePrincipal = "hbase/localhost";
String spnegoPrincipal = "HTTP/localhost";
@ -140,7 +140,7 @@ public class TestShadeSaslAuthenticationProvider {
}
}
private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
private static final HBaseTestingUtil UTIL = new HBaseTestingUtil();
private static final Configuration CONF = UTIL.getConfiguration();
private static LocalHBaseCluster CLUSTER;
private static File KEYTAB_FILE;

View File

@ -29,7 +29,7 @@ import javax.security.auth.kerberos.KerberosTicket;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
import org.apache.hadoop.hbase.HBaseCommonTestingUtil;
import org.apache.hadoop.hbase.http.TestHttpServer.EchoServlet;
import org.apache.hadoop.hbase.http.resource.JerseyResource;
import org.apache.hadoop.hbase.testclassification.MiscTests;
@ -98,12 +98,12 @@ public class TestProxyUserSpnegoHttpServer extends HttpServerFunctionalTest {
@BeforeClass
public static void setupServer() throws Exception {
Configuration conf = new Configuration();
HBaseCommonTestingUtility htu = new HBaseCommonTestingUtility(conf);
HBaseCommonTestingUtil htu = new HBaseCommonTestingUtil(conf);
final String serverPrincipal = "HTTP/" + KDC_SERVER_HOST;
kdc = SimpleKdcServerUtil.getRunningSimpleKdcServer(new File(htu.getDataTestDir().toString()),
HBaseCommonTestingUtility::randomFreePort);
HBaseCommonTestingUtil::randomFreePort);
File keytabDir = new File(htu.getDataTestDir("keytabs").toString());
if (keytabDir.exists()) {
deleteRecursively(keytabDir);

View File

@ -26,7 +26,7 @@ import javax.net.ssl.HttpsURLConnection;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
import org.apache.hadoop.hbase.HBaseCommonTestingUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.http.ssl.KeyStoreTestUtil;
import org.apache.hadoop.hbase.testclassification.MediumTests;
@ -64,12 +64,12 @@ public class TestSSLHttpServer extends HttpServerFunctionalTest {
private static File keystoresDir;
private static String sslConfDir;
private static SSLFactory clientSslFactory;
private static HBaseCommonTestingUtility HTU;
private static HBaseCommonTestingUtil HTU;
@BeforeClass
public static void setup() throws Exception {
HTU = new HBaseCommonTestingUtility();
HTU = new HBaseCommonTestingUtil();
serverConf = HTU.getConfiguration();
serverConf.setInt(HttpServer.HTTP_MAX_THREADS, TestHttpServer.MAX_THREADS);

View File

@ -28,7 +28,7 @@ import javax.security.auth.Subject;
import javax.security.auth.kerberos.KerberosTicket;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
import org.apache.hadoop.hbase.HBaseCommonTestingUtil;
import org.apache.hadoop.hbase.http.TestHttpServer.EchoServlet;
import org.apache.hadoop.hbase.http.resource.JerseyResource;
import org.apache.hadoop.hbase.testclassification.MiscTests;
@ -88,12 +88,12 @@ public class TestSpnegoHttpServer extends HttpServerFunctionalTest {
@BeforeClass
public static void setupServer() throws Exception {
Configuration conf = new Configuration();
HBaseCommonTestingUtility htu = new HBaseCommonTestingUtility(conf);
HBaseCommonTestingUtil htu = new HBaseCommonTestingUtil(conf);
final String serverPrincipal = "HTTP/" + KDC_SERVER_HOST;
kdc = SimpleKdcServerUtil.getRunningSimpleKdcServer(new File(htu.getDataTestDir().toString()),
HBaseCommonTestingUtility::randomFreePort);
HBaseCommonTestingUtil::randomFreePort);
File keytabDir = new File(htu.getDataTestDir("keytabs").toString());
if (keytabDir.exists()) {
deleteRecursively(keytabDir);

View File

@ -37,7 +37,7 @@ import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
import org.apache.hadoop.hbase.HBaseCommonTestingUtil;
import org.apache.hadoop.hbase.http.HttpConfig;
import org.apache.hadoop.hbase.http.HttpServer;
import org.apache.hadoop.hbase.http.log.LogLevel.CLI;
@ -83,13 +83,13 @@ public class TestLogLevel {
private static final String LOCALHOST = "localhost";
private static final String clientPrincipal = "client/" + LOCALHOST;
private static String HTTP_PRINCIPAL = "HTTP/" + LOCALHOST;
private static HBaseCommonTestingUtility HTU;
private static HBaseCommonTestingUtil HTU;
private static File keyTabFile;
@BeforeClass
public static void setUp() throws Exception {
serverConf = new Configuration();
HTU = new HBaseCommonTestingUtility(serverConf);
HTU = new HBaseCommonTestingUtil(serverConf);
File keystoreDir = new File(HTU.getDataTestDir("keystore").toString());
keystoreDir.mkdirs();

View File

@ -43,7 +43,7 @@ import java.util.HashMap;
import java.util.Map;
import javax.security.auth.x500.X500Principal;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
import org.apache.hadoop.hbase.HBaseCommonTestingUtil;
import org.apache.hadoop.security.ssl.FileBasedKeyStoresFactory;
import org.apache.hadoop.security.ssl.SSLFactory;
import org.bouncycastle.x509.X509V1CertificateGenerator;
@ -281,10 +281,10 @@ public final class KeyStoreTestUtil {
String trustPassword = "trustP";
File sslClientConfFile = new File(
sslConfDir + "/ssl-client-" + System.nanoTime() + "-" + HBaseCommonTestingUtility
sslConfDir + "/ssl-client-" + System.nanoTime() + "-" + HBaseCommonTestingUtil
.getRandomUUID() + ".xml");
File sslServerConfFile = new File(
sslConfDir + "/ssl-server-" + System.nanoTime() + "-" + HBaseCommonTestingUtility
sslConfDir + "/ssl-server-" + System.nanoTime() + "-" + HBaseCommonTestingUtil
.getRandomUUID() + ".xml");
Map<String, X509Certificate> certs = new HashMap<>();

View File

@ -37,13 +37,18 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.Threads;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Manages the interactions with an already deployed distributed cluster (as opposed to
* a pseudo-distributed, or mini/local cluster). This is used by integration and system tests.
* Manages the interactions with an already deployed distributed cluster (as opposed to a
* pseudo-distributed, or mini/local cluster). This is used by integration and system tests.
*/
@InterfaceAudience.Private
public class DistributedHBaseCluster extends HBaseCluster {
public class DistributedHBaseCluster extends HBaseClusterInterface {
private static final Logger LOG = LoggerFactory.getLogger(DistributedHBaseCluster.class);
private Admin admin;
private final Connection connection;

View File

@ -217,7 +217,7 @@ public class IntegrationTestBackupRestore extends IntegrationTestBase {
LOG.info("Creating table {} with {} splits.", tableName,
regionsCountPerServer * regionServerCount);
startTime = EnvironmentEdgeManager.currentTime();
HBaseTestingUtility.createPreSplitLoadTestTable(util.getConfiguration(), desc, columns,
HBaseTestingUtil.createPreSplitLoadTestTable(util.getConfiguration(), desc, columns,
regionsCountPerServer);
util.waitTableAvailable(tableName);
endTime = EnvironmentEdgeManager.currentTime();

View File

@ -114,7 +114,7 @@ public class IntegrationTestDDLMasterFailover extends IntegrationTestBase {
protected static final int DEFAULT_NUM_REGIONS = 50; // number of regions in pre-split tables
private boolean keepObjectsAtTheEnd = false;
protected HBaseCluster cluster;
protected HBaseClusterInterface cluster;
protected Connection connection;

View File

@ -64,7 +64,7 @@ public class IntegrationTestIngest extends IntegrationTestBase {
// Log is being used in IntegrationTestIngestWithEncryption, hence it is protected
protected static final Logger LOG = LoggerFactory.getLogger(IntegrationTestIngest.class);
protected IntegrationTestingUtility util;
protected HBaseCluster cluster;
protected HBaseClusterInterface cluster;
protected LoadTestTool loadTool;
protected String[] LOAD_TEST_TOOL_INIT_ARGS = {

View File

@ -46,7 +46,7 @@ public class IntegrationTestIngestStripeCompactions extends IntegrationTestInges
.setValue(HStore.BLOCKING_STOREFILES_KEY, "100").build();
ColumnFamilyDescriptor familyDescriptor =
ColumnFamilyDescriptorBuilder.of(HFileTestUtil.DEFAULT_COLUMN_FAMILY);
HBaseTestingUtility.createPreSplitLoadTestTable(util.getConfiguration(),
HBaseTestingUtil.createPreSplitLoadTestTable(util.getConfiguration(),
tableDescriptor, familyDescriptor);
}

View File

@ -57,7 +57,7 @@ public class IntegrationTestMetaReplicas {
StorefileRefresherChore.REGIONSERVER_STOREFILE_REFRESH_PERIOD, 1000);
// Make sure there are three servers.
util.initializeCluster(3);
HBaseTestingUtility.setReplicas(util.getAdmin(), TableName.META_TABLE_NAME, 3);
HBaseTestingUtil.setReplicas(util.getAdmin(), TableName.META_TABLE_NAME, 3);
ZKWatcher zkw = util.getZooKeeperWatcher();
Configuration conf = util.getConfiguration();
String baseZNode = conf.get(HConstants.ZOOKEEPER_ZNODE_PARENT,

View File

@ -23,7 +23,7 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.ReflectionUtils;
/**
* Facility for <strong>integration/system</strong> tests. This extends {@link HBaseTestingUtility}
* Facility for <strong>integration/system</strong> tests. This extends {@link HBaseTestingUtil}
* and adds-in the functionality needed by integration and system tests. This class understands
* distributed and pseudo-distributed/local cluster deployments, and abstracts those from the tests
* in this module.
@ -39,7 +39,7 @@ import org.apache.hadoop.util.ReflectionUtils;
* via {@link #initializeCluster(int)}. Individual tests should not directly call
* {@link #setUseDistributedCluster(Configuration)}.
*/
public class IntegrationTestingUtility extends HBaseTestingUtility {
public class IntegrationTestingUtility extends HBaseTestingUtil {
public IntegrationTestingUtility() {
this(HBaseConfiguration.create());
@ -84,7 +84,7 @@ public class IntegrationTestingUtility extends HBaseTestingUtility {
* exception otherwise.
*/
public void checkNodeCount(int numSlaves) throws Exception {
HBaseCluster cluster = getHBaseClusterInterface();
HBaseClusterInterface cluster = getHBaseClusterInterface();
if (cluster.getClusterMetrics().getLiveServerMetrics().size() < numSlaves) {
throw new Exception("Cluster does not have enough nodes:" + numSlaves);
}

View File

@ -296,7 +296,7 @@ public class StripeCompactionsPerformanceEvaluation extends AbstractHBaseTool {
private void createTable(TableDescriptorBuilder builder)
throws Exception {
deleteTable();
if (util.getHBaseClusterInterface() instanceof MiniHBaseCluster) {
if (util.getHBaseClusterInterface() instanceof SingleProcessHBaseCluster) {
LOG.warn("Test does not make a lot of sense for minicluster. Will set flush size low.");
builder.setValue(HConstants.HREGION_MEMSTORE_FLUSH_SIZE, "1048576");
}

View File

@ -46,12 +46,12 @@ public class TestRESTApiClusterManager {
@Rule
public final TestName testName = new TestName();
private static HBaseCommonTestingUtility testingUtility;
private static HBaseCommonTestingUtil testingUtility;
private ClusterManager clusterManager;
@BeforeClass
public static void beforeClass() {
testingUtility = new HBaseCommonTestingUtility();
testingUtility = new HBaseCommonTestingUtil();
configureClusterManager(testingUtility.getConfiguration());
}

View File

@ -116,10 +116,10 @@ public class TestShellExecEndpointCoprocessor {
}
private static File ensureTestDataDirExists(
final HBaseTestingUtility testingUtility
final HBaseTestingUtil testingUtility
) throws IOException {
final Path testDataDir = Optional.of(testingUtility)
.map(HBaseTestingUtility::getDataTestDir)
.map(HBaseTestingUtil::getDataTestDir)
.map(Object::toString)
.map(val -> Paths.get(val))
.orElseThrow(() -> new RuntimeException("Unable to locate temp directory path."));

View File

@ -32,13 +32,13 @@ import java.util.function.Consumer;
import org.apache.commons.lang3.RandomUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ClusterMetrics;
import org.apache.hadoop.hbase.HBaseCluster;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HBaseClusterInterface;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.IntegrationTestBase;
import org.apache.hadoop.hbase.IntegrationTestingUtility;
import org.apache.hadoop.hbase.MiniHBaseCluster;
import org.apache.hadoop.hbase.ServerMetrics;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.SingleProcessHBaseCluster;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.chaos.factories.MonkeyConstants;
import org.apache.hadoop.hbase.chaos.monkies.PolicyBasedChaosMonkey;
@ -87,7 +87,7 @@ public abstract class Action {
protected static final long START_NAMENODE_TIMEOUT_DEFAULT = PolicyBasedChaosMonkey.TIMEOUT;
protected ActionContext context;
protected HBaseCluster cluster;
protected HBaseClusterInterface cluster;
protected ClusterMetrics initialStatus;
protected ServerName[] initialServers;
protected Properties monkeyProps;
@ -197,7 +197,7 @@ public abstract class Action {
protected void suspendRs(ServerName server) throws IOException {
getLogger().info("Suspending regionserver {}", server);
cluster.suspendRegionServer(server);
if(!(cluster instanceof MiniHBaseCluster)){
if(!(cluster instanceof SingleProcessHBaseCluster)){
cluster.waitForRegionServerToStop(server, killRsTimeout);
}
getLogger().info("Suspending regionserver {}. Reported num of rs:{}", server,
@ -207,7 +207,7 @@ public abstract class Action {
protected void resumeRs(ServerName server) throws IOException {
getLogger().info("Resuming regionserver {}", server);
cluster.resumeRegionServer(server);
if(!(cluster instanceof MiniHBaseCluster)){
if(!(cluster instanceof SingleProcessHBaseCluster)){
cluster.waitForRegionServerToStart(server.getHostname(), server.getPort(), startRsTimeout);
}
getLogger().info("Resuming regionserver {}. Reported num of rs:{}", server,
@ -342,7 +342,7 @@ public abstract class Action {
*/
protected void modifyAllTableColumns(TableName tableName,
BiConsumer<String, ColumnFamilyDescriptorBuilder> transform) throws IOException {
HBaseTestingUtility util = this.context.getHBaseIntegrationTestingUtility();
HBaseTestingUtil util = this.context.getHBaseIntegrationTestingUtility();
Admin admin = util.getAdmin();
TableDescriptor tableDescriptor = admin.getDescriptor(tableName);
@ -401,7 +401,7 @@ public abstract class Action {
return util;
}
public HBaseCluster getHBaseCluster() {
public HBaseClusterInterface getHBaseCluster() {
return util.getHBaseClusterInterface();
}

View File

@ -18,7 +18,7 @@
package org.apache.hadoop.hbase.chaos.actions;
import java.util.Random;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.TableDescriptor;
@ -51,7 +51,7 @@ public class ChangeSplitPolicyAction extends Action {
@Override
public void perform() throws Exception {
HBaseTestingUtility util = context.getHBaseIntegrationTestingUtility();
HBaseTestingUtil util = context.getHBaseIntegrationTestingUtility();
Admin admin = util.getAdmin();
getLogger().info("Performing action: Change split policy of table " + tableName);

View File

@ -19,7 +19,7 @@
package org.apache.hadoop.hbase.chaos.actions;
import org.apache.commons.lang3.RandomUtils;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.CompactType;
@ -52,7 +52,7 @@ public class CompactMobAction extends Action {
@Override
public void perform() throws Exception {
HBaseTestingUtility util = context.getHBaseIntegrationTestingUtility();
HBaseTestingUtil util = context.getHBaseIntegrationTestingUtility();
Admin admin = util.getAdmin();
boolean major = RandomUtils.nextInt(0, 100) < majorRatio;

View File

@ -20,7 +20,7 @@ package org.apache.hadoop.hbase.chaos.actions;
import java.util.List;
import org.apache.commons.lang3.RandomUtils;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.chaos.monkies.PolicyBasedChaosMonkey;
import org.apache.hadoop.hbase.client.Admin;
@ -56,7 +56,7 @@ public class CompactRandomRegionOfTableAction extends Action {
@Override
public void perform() throws Exception {
HBaseTestingUtility util = context.getHBaseIntegrationTestingUtility();
HBaseTestingUtil util = context.getHBaseIntegrationTestingUtility();
Admin admin = util.getAdmin();
boolean major = RandomUtils.nextInt(0, 100) < majorRatio;

View File

@ -19,7 +19,7 @@
package org.apache.hadoop.hbase.chaos.actions;
import org.apache.commons.lang3.RandomUtils;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.slf4j.Logger;
@ -52,7 +52,7 @@ public class CompactTableAction extends Action {
@Override
public void perform() throws Exception {
HBaseTestingUtility util = context.getHBaseIntegrationTestingUtility();
HBaseTestingUtil util = context.getHBaseIntegrationTestingUtility();
Admin admin = util.getAdmin();
boolean major = RandomUtils.nextInt(0, 100) < majorRatio;

View File

@ -19,7 +19,7 @@
package org.apache.hadoop.hbase.chaos.actions;
import java.util.List;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.chaos.monkies.PolicyBasedChaosMonkey;
import org.apache.hadoop.hbase.client.Admin;
@ -51,7 +51,7 @@ public class FlushRandomRegionOfTableAction extends Action {
@Override
public void perform() throws Exception {
HBaseTestingUtility util = context.getHBaseIntegrationTestingUtility();
HBaseTestingUtil util = context.getHBaseIntegrationTestingUtility();
Admin admin = util.getAdmin();
getLogger().info("Performing action: Flush random region of table " + tableName);

View File

@ -18,7 +18,7 @@
package org.apache.hadoop.hbase.chaos.actions;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.slf4j.Logger;
@ -47,7 +47,7 @@ public class FlushTableAction extends Action {
@Override
public void perform() throws Exception {
HBaseTestingUtility util = context.getHBaseIntegrationTestingUtility();
HBaseTestingUtil util = context.getHBaseIntegrationTestingUtility();
Admin admin = util.getAdmin();
// Don't try the flush if we're stopping

View File

@ -20,7 +20,7 @@ package org.apache.hadoop.hbase.chaos.actions;
import java.util.List;
import org.apache.commons.lang3.RandomUtils;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.RegionInfo;
@ -51,7 +51,7 @@ public class MergeRandomAdjacentRegionsOfTableAction extends Action {
@Override
public void perform() throws Exception {
HBaseTestingUtility util = context.getHBaseIntegrationTestingUtility();
HBaseTestingUtil util = context.getHBaseIntegrationTestingUtility();
Admin admin = util.getAdmin();
getLogger().info("Performing action: Merge random adjacent regions of table " + tableName);

View File

@ -19,7 +19,7 @@
package org.apache.hadoop.hbase.chaos.actions;
import java.util.List;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.chaos.monkies.PolicyBasedChaosMonkey;
import org.apache.hadoop.hbase.client.Admin;
@ -54,7 +54,7 @@ public class MoveRandomRegionOfTableAction extends Action {
Thread.sleep(sleepTime);
}
HBaseTestingUtility util = context.getHBaseIntegrationTestingUtility();
HBaseTestingUtil util = context.getHBaseIntegrationTestingUtility();
Admin admin = util.getAdmin();
getLogger().info("Performing action: Move random region of table " + tableName);

View File

@ -18,7 +18,7 @@
package org.apache.hadoop.hbase.chaos.actions;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
@ -48,7 +48,7 @@ public class SnapshotTableAction extends Action {
@Override
public void perform() throws Exception {
HBaseTestingUtility util = context.getHBaseIntegrationTestingUtility();
HBaseTestingUtil util = context.getHBaseIntegrationTestingUtility();
String snapshotName = tableName + "-it-" + EnvironmentEdgeManager.currentTime();
Admin admin = util.getAdmin();

View File

@ -19,7 +19,7 @@ package org.apache.hadoop.hbase.chaos.actions;
import java.io.IOException;
import java.util.concurrent.ThreadLocalRandom;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.slf4j.Logger;
@ -51,7 +51,7 @@ public class SplitAllRegionOfTableAction extends Action {
@Override
public void perform() throws Exception {
HBaseTestingUtility util = context.getHBaseIntegrationTestingUtility();
HBaseTestingUtil util = context.getHBaseIntegrationTestingUtility();
Admin admin = util.getAdmin();
// Don't try the split if we're stopping
if (context.isStopping()) {

View File

@ -19,7 +19,7 @@
package org.apache.hadoop.hbase.chaos.actions;
import java.util.List;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.chaos.monkies.PolicyBasedChaosMonkey;
import org.apache.hadoop.hbase.client.Admin;
@ -51,7 +51,7 @@ public class SplitRandomRegionOfTableAction extends Action {
@Override
public void perform() throws Exception {
HBaseTestingUtility util = context.getHBaseIntegrationTestingUtility();
HBaseTestingUtil util = context.getHBaseIntegrationTestingUtility();
Admin admin = util.getAdmin();
getLogger().info("Performing action: Split random region of table " + tableName);

View File

@ -20,7 +20,7 @@ package org.apache.hadoop.hbase.chaos.actions;
import java.io.IOException;
import org.apache.hadoop.hbase.DistributedHBaseCluster;
import org.apache.hadoop.hbase.HBaseCluster;
import org.apache.hadoop.hbase.HBaseClusterInterface;
import org.apache.hadoop.hbase.HBaseClusterManager;
/**
@ -38,7 +38,7 @@ abstract public class SudoCommandAction extends Action {
@Override
public void init(ActionContext context) throws IOException {
super.init(context);
HBaseCluster cluster = context.getHBaseCluster();
HBaseClusterInterface cluster = context.getHBaseCluster();
if (cluster instanceof DistributedHBaseCluster){
Object manager = ((DistributedHBaseCluster)cluster).getClusterManager();
if (manager instanceof HBaseClusterManager){

View File

@ -19,7 +19,7 @@
package org.apache.hadoop.hbase.chaos.actions;
import java.util.Random;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.slf4j.Logger;
@ -44,7 +44,7 @@ public class TruncateTableAction extends Action {
@Override
public void perform() throws Exception {
HBaseTestingUtility util = context.getHBaseIntegrationTestingUtility();
HBaseTestingUtil util = context.getHBaseIntegrationTestingUtility();
Admin admin = util.getAdmin();
// Don't try the truncate if we're stopping

View File

@ -36,7 +36,7 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.IntegrationTestBase;
import org.apache.hadoop.hbase.IntegrationTestingUtility;
import org.apache.hadoop.hbase.KeyValue;
@ -211,7 +211,7 @@ public class IntegrationTestBulkLoad extends IntegrationTestBase {
TableDescriptor desc = admin.getDescriptor(t);
TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(desc);
builder.setCoprocessor(SlowMeCoproScanOperations.class.getName());
HBaseTestingUtility.modifyTableSync(admin, builder.build());
HBaseTestingUtil.modifyTableSync(admin, builder.build());
}
@Test
@ -252,7 +252,7 @@ public class IntegrationTestBulkLoad extends IntegrationTestBase {
if (replicaCount == NUM_REPLICA_COUNT_DEFAULT) return;
TableName t = getTablename();
HBaseTestingUtility.setReplicas(util.getAdmin(), t, replicaCount);
HBaseTestingUtil.setReplicas(util.getAdmin(), t, replicaCount);
}
private void runLinkedListMRJob(int iteration) throws Exception {

View File

@ -42,7 +42,7 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.IntegrationTestBase;
@ -776,14 +776,14 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase {
.build();
// If we want to pre-split compute how many splits.
if (conf.getBoolean(HBaseTestingUtility.PRESPLIT_TEST_TABLE_KEY,
HBaseTestingUtility.PRESPLIT_TEST_TABLE)) {
if (conf.getBoolean(HBaseTestingUtil.PRESPLIT_TEST_TABLE_KEY,
HBaseTestingUtil.PRESPLIT_TEST_TABLE)) {
int numberOfServers = admin.getRegionServers().size();
if (numberOfServers == 0) {
throw new IllegalStateException("No live regionservers");
}
int regionsPerServer = conf.getInt(HBaseTestingUtility.REGIONS_PER_SERVER_KEY,
HBaseTestingUtility.DEFAULT_REGIONS_PER_SERVER);
int regionsPerServer = conf.getInt(HBaseTestingUtil.REGIONS_PER_SERVER_KEY,
HBaseTestingUtil.DEFAULT_REGIONS_PER_SERVER);
int totalNumberOfRegions = numberOfServers * regionsPerServer;
LOG.info("Number of live regionservers: " + numberOfServers + ", " +
"pre-splitting table into " + totalNumberOfRegions + " regions " +
@ -1932,9 +1932,9 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase {
System.err.println(" -D"+ TABLE_NAME_KEY+ "=<tableName>");
System.err.println(" Run using the <tableName> as the tablename. Defaults to "
+ DEFAULT_TABLE_NAME);
System.err.println(" -D"+ HBaseTestingUtility.REGIONS_PER_SERVER_KEY+ "=<# regions>");
System.err.println(" -D"+ HBaseTestingUtil.REGIONS_PER_SERVER_KEY+ "=<# regions>");
System.err.println(" Create table with presplit regions per server. Defaults to "
+ HBaseTestingUtility.DEFAULT_REGIONS_PER_SERVER);
+ HBaseTestingUtil.DEFAULT_REGIONS_PER_SERVER);
System.err.println(" -DuseMob=<true|false>");
System.err.println(" Create table so that the mob read/write path is forced. " +

View File

@ -39,7 +39,7 @@ import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.IntegrationTestBase;
import org.apache.hadoop.hbase.IntegrationTestingUtility;
@ -448,14 +448,14 @@ public class IntegrationTestLoadCommonCrawl extends IntegrationTestBase {
.setColumnFamilies(families)
.build();
if (getConf().getBoolean(HBaseTestingUtility.PRESPLIT_TEST_TABLE_KEY,
HBaseTestingUtility.PRESPLIT_TEST_TABLE)) {
if (getConf().getBoolean(HBaseTestingUtil.PRESPLIT_TEST_TABLE_KEY,
HBaseTestingUtil.PRESPLIT_TEST_TABLE)) {
int numberOfServers = admin.getRegionServers().size();
if (numberOfServers == 0) {
throw new IllegalStateException("No live regionservers");
}
int regionsPerServer = getConf().getInt(HBaseTestingUtility.REGIONS_PER_SERVER_KEY,
HBaseTestingUtility.DEFAULT_REGIONS_PER_SERVER);
int regionsPerServer = getConf().getInt(HBaseTestingUtil.REGIONS_PER_SERVER_KEY,
HBaseTestingUtil.DEFAULT_REGIONS_PER_SERVER);
int totalNumberOfRegions = numberOfServers * regionsPerServer;
LOG.info("Creating test table: " + tableDescriptor);
LOG.info("Number of live regionservers: " + numberOfServers + ", " +

View File

@ -60,7 +60,7 @@ public class TestPerformanceEvaluation {
public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(TestPerformanceEvaluation.class);
private static final HBaseTestingUtility HTU = new HBaseTestingUtility();
private static final HBaseTestingUtil HTU = new HBaseTestingUtil();
@Test
public void testDefaultInMemoryCompaction() {

View File

@ -35,7 +35,7 @@ import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CompareOperator;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.NotServingRegionException;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
@ -84,7 +84,7 @@ public class TestTableInputFormat {
private static final Logger LOG = LoggerFactory.getLogger(TestTableInputFormat.class);
private final static HBaseTestingUtility UTIL = new HBaseTestingUtility();
private final static HBaseTestingUtil UTIL = new HBaseTestingUtil();
static final byte[] FAMILY = Bytes.toBytes("family");

View File

@ -29,7 +29,7 @@ import java.util.Set;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
@ -88,7 +88,7 @@ public class TestTableMapReduceUtil {
private static ImmutableMap<String, ImmutableSet<String>> relation = ImmutableMap
.of(PRESIDENT_PATTERN, presidentsRowKeys, ACTOR_PATTERN, actorsRowKeys);
private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
private static final HBaseTestingUtil UTIL = new HBaseTestingUtil();
@BeforeClass
public static void beforeClass() throws Exception {

View File

@ -21,7 +21,7 @@ import static org.junit.Assert.fail;
import java.io.IOException;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.testclassification.MediumTests;
@ -52,7 +52,7 @@ public class TestTableOutputFormatConnectionExhaust {
private static final Logger LOG =
LoggerFactory.getLogger(TestTableOutputFormatConnectionExhaust.class);
private final static HBaseTestingUtility UTIL = new HBaseTestingUtility();
private final static HBaseTestingUtil UTIL = new HBaseTestingUtil();
static final String TABLE = "TestTableOutputFormatConnectionExhaust";
static final String FAMILY = "family";

View File

@ -24,7 +24,7 @@ import java.io.IOException;
import java.util.Iterator;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Result;
@ -91,8 +91,8 @@ public class TestTableSnapshotInputFormat extends TableSnapshotInputFormatTestBa
public static class TestTableSnapshotReducer extends MapReduceBase
implements Reducer<ImmutableBytesWritable, NullWritable, NullWritable, NullWritable> {
HBaseTestingUtility.SeenRowTracker rowTracker =
new HBaseTestingUtility.SeenRowTracker(aaa, after_zzz);
HBaseTestingUtil.SeenRowTracker rowTracker =
new HBaseTestingUtil.SeenRowTracker(aaa, after_zzz);
@Override
public void reduce(ImmutableBytesWritable key, Iterator<NullWritable> values,
@ -171,7 +171,7 @@ public class TestTableSnapshotInputFormat extends TableSnapshotInputFormatTestBa
}
@Override
protected void testWithMockedMapReduce(HBaseTestingUtility util, String snapshotName,
protected void testWithMockedMapReduce(HBaseTestingUtil util, String snapshotName,
int numRegions, int numSplitsPerRegion, int expectedNumSplits, boolean setLocalityEnabledTo)
throws Exception {
final TableName tableName = TableName.valueOf(name.getMethodName());
@ -212,8 +212,8 @@ public class TestTableSnapshotInputFormat extends TableSnapshotInputFormatTestBa
Assert.assertEquals(expectedNumSplits, splits.length);
HBaseTestingUtility.SeenRowTracker rowTracker =
new HBaseTestingUtility.SeenRowTracker(startRow, stopRow);
HBaseTestingUtil.SeenRowTracker rowTracker =
new HBaseTestingUtil.SeenRowTracker(startRow, stopRow);
// SNAPSHOT_INPUTFORMAT_LOCALITY_ENABLED_KEY is not explicitly specified,
// so the default value is taken.
@ -256,7 +256,7 @@ public class TestTableSnapshotInputFormat extends TableSnapshotInputFormatTestBa
}
@Override
protected void testWithMapReduceImpl(HBaseTestingUtility util, TableName tableName,
protected void testWithMapReduceImpl(HBaseTestingUtil util, TableName tableName,
String snapshotName, Path tableDir, int numRegions, int numSplitsPerRegion,
int expectedNumSplits, boolean shutdownCluster) throws Exception {
doTestWithMapReduce(util, tableName, snapshotName, getStartRow(), getEndRow(), tableDir,
@ -264,7 +264,7 @@ public class TestTableSnapshotInputFormat extends TableSnapshotInputFormatTestBa
}
// this is also called by the IntegrationTestTableSnapshotInputFormat
public static void doTestWithMapReduce(HBaseTestingUtility util, TableName tableName,
public static void doTestWithMapReduce(HBaseTestingUtil util, TableName tableName,
String snapshotName, byte[] startRow, byte[] endRow, Path tableDir, int numRegions,
int numSplitsPerRegion,int expectedNumSplits, boolean shutdownCluster) throws Exception {

View File

@ -31,7 +31,7 @@ import java.util.NavigableMap;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
@ -56,7 +56,7 @@ import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
*/
public abstract class MultiTableInputFormatTestBase {
static final Logger LOG = LoggerFactory.getLogger(TestMultiTableInputFormat.class);
public static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
public static final HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil();
static final String TABLE_NAME = "scantest";
static final byte[] INPUT_FAMILY = Bytes.toBytes("contents");
static final String KEY_STARTROW = "startRow";

View File

@ -27,8 +27,8 @@ import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.StartMiniClusterOption;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.StartTestingClusterOption;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Result;
@ -51,7 +51,7 @@ import org.slf4j.LoggerFactory;
public abstract class TableSnapshotInputFormatTestBase {
private static final Logger LOG = LoggerFactory.getLogger(TableSnapshotInputFormatTestBase.class);
protected final HBaseTestingUtility UTIL = new HBaseTestingUtility();
protected final HBaseTestingUtil UTIL = new HBaseTestingUtil();
protected static final int NUM_REGION_SERVERS = 2;
protected static final byte[][] FAMILIES = {Bytes.toBytes("f1"), Bytes.toBytes("f2")};
@ -61,7 +61,7 @@ public abstract class TableSnapshotInputFormatTestBase {
@Before
public void setupCluster() throws Exception {
setupConf(UTIL.getConfiguration());
StartMiniClusterOption option = StartMiniClusterOption.builder()
StartTestingClusterOption option = StartTestingClusterOption.builder()
.numRegionServers(NUM_REGION_SERVERS).numDataNodes(NUM_REGION_SERVERS)
.createRootDir(true).build();
UTIL.startMiniCluster(option);
@ -79,11 +79,11 @@ public abstract class TableSnapshotInputFormatTestBase {
conf.setBoolean(SnapshotManager.HBASE_SNAPSHOT_ENABLED, true);
}
protected abstract void testWithMockedMapReduce(HBaseTestingUtility util, String snapshotName,
protected abstract void testWithMockedMapReduce(HBaseTestingUtil util, String snapshotName,
int numRegions, int numSplitsPerRegion, int expectedNumSplits, boolean setLocalityEnabledTo)
throws Exception;
protected abstract void testWithMapReduceImpl(HBaseTestingUtility util, TableName tableName,
protected abstract void testWithMapReduceImpl(HBaseTestingUtil util, TableName tableName,
String snapshotName, Path tableDir, int numRegions, int numSplitsPerRegion,
int expectedNumSplits, boolean shutdownCluster) throws Exception;
@ -160,7 +160,7 @@ public abstract class TableSnapshotInputFormatTestBase {
public abstract void testRestoreSnapshotDoesNotCreateBackRefLinksInit(TableName tableName,
String snapshotName, Path tmpTableDir) throws Exception;
protected void testWithMapReduce(HBaseTestingUtility util, String snapshotName, int numRegions,
protected void testWithMapReduce(HBaseTestingUtil util, String snapshotName, int numRegions,
int numSplitsPerRegion, int expectedNumSplits, boolean shutdownCluster) throws Exception {
Path tableDir = util.getDataTestDirOnTestFS(snapshotName);
TableName tableName = TableName.valueOf("testWithMapReduce");
@ -188,7 +188,7 @@ public abstract class TableSnapshotInputFormatTestBase {
}
}
protected static void createTableAndSnapshot(HBaseTestingUtility util, TableName tableName,
protected static void createTableAndSnapshot(HBaseTestingUtil util, TableName tableName,
String snapshotName, byte[] startRow, byte[] endRow, int numRegions)
throws Exception {
try {

View File

@ -32,7 +32,7 @@ import org.apache.hadoop.fs.LocalFileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
@ -56,7 +56,7 @@ public class TestCellCounter {
public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(TestCellCounter.class);
private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
private static final HBaseTestingUtil UTIL = new HBaseTestingUtil();
private static final byte[] ROW1 = Bytes.toBytesBinary("\\x01row1");
private static final byte[] ROW2 = Bytes.toBytesBinary("\\x01row2");
private static final String FAMILY_A_STRING = "a";

View File

@ -30,7 +30,7 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
@ -67,7 +67,7 @@ public class TestCopyTable {
public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(TestCopyTable.class);
private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private static final HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil();
private static final byte[] ROW1 = Bytes.toBytes("row1");
private static final byte[] ROW2 = Bytes.toBytes("row2");
private static final String FAMILY_A_STRING = "a";

View File

@ -18,7 +18,7 @@
package org.apache.hadoop.hbase.mapreduce;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.testclassification.MapReduceTests;
import org.junit.ClassRule;
@ -48,7 +48,7 @@ public class TestHBaseMRTestingUtility {
dummyProps.put("mapreduce.jobhistory.webapp.address", "dummyhost:11238");
dummyProps.put("yarn.resourcemanager.webapp.address", "dummyhost:11239");
HBaseTestingUtility hbt = new HBaseTestingUtility();
HBaseTestingUtil hbt = new HBaseTestingUtil();
// populate the mr props to the Configuration instance
for (Map.Entry<String, String> entry : dummyProps.entrySet()) {

View File

@ -58,7 +58,7 @@ import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HDFSBlocksDistribution;
import org.apache.hadoop.hbase.HadoopShims;
@ -66,7 +66,7 @@ import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.PerformanceEvaluation;
import org.apache.hadoop.hbase.PrivateCellUtil;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.StartMiniClusterOption;
import org.apache.hadoop.hbase.StartTestingClusterOption;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.TagType;
@ -153,7 +153,7 @@ public class TestHFileOutputFormat2 {
private static final TableName[] TABLE_NAMES = Stream.of("TestTable", "TestTable2",
"TestTable3").map(TableName::valueOf).toArray(TableName[]::new);
private HBaseTestingUtility util = new HBaseTestingUtility();
private HBaseTestingUtil util = new HBaseTestingUtil();
private static final Logger LOG = LoggerFactory.getLogger(TestHFileOutputFormat2.class);
@ -635,7 +635,7 @@ public class TestHFileOutputFormat2 {
private void doIncrementalLoadTest(boolean shouldChangeRegions, boolean shouldKeepLocality,
boolean putSortReducer, List<String> tableStr) throws Exception {
util = new HBaseTestingUtility();
util = new HBaseTestingUtil();
Configuration conf = util.getConfiguration();
conf.setBoolean(MultiTableHFileOutputFormat.LOCALITY_SENSITIVE_CONF_KEY, shouldKeepLocality);
int hostCount = 1;
@ -651,7 +651,7 @@ public class TestHFileOutputFormat2 {
for (int i = 0; i < hostCount; ++i) {
hostnames[i] = "datanode_" + i;
}
StartMiniClusterOption option = StartMiniClusterOption.builder()
StartTestingClusterOption option = StartTestingClusterOption.builder()
.numRegionServers(hostCount).dataNodeHosts(hostnames).build();
util.startMiniCluster(option);
@ -1177,7 +1177,7 @@ public class TestHFileOutputFormat2 {
TableDescriptorBuilder.newBuilder(TABLE_NAMES[0]);
Mockito.doReturn(tableDescriptorBuilder.build()).when(table).getDescriptor();
for (ColumnFamilyDescriptor hcd : HBaseTestingUtility.generateColumnDescriptors()) {
for (ColumnFamilyDescriptor hcd : HBaseTestingUtil.generateColumnDescriptors()) {
tableDescriptorBuilder.setColumnFamily(hcd);
}
@ -1454,7 +1454,7 @@ public class TestHFileOutputFormat2 {
public void manualTest(String args[]) throws Exception {
Configuration conf = HBaseConfiguration.create();
util = new HBaseTestingUtility(conf);
util = new HBaseTestingUtil(conf);
if ("newtable".equals(args[0])) {
TableName tname = TableName.valueOf(args[1]);
byte[][] splitKeys = generateRandomSplitKeys(4);
@ -1476,7 +1476,7 @@ public class TestHFileOutputFormat2 {
@Test
public void testBlockStoragePolicy() throws Exception {
util = new HBaseTestingUtility();
util = new HBaseTestingUtil();
Configuration conf = util.getConfiguration();
conf.set(HFileOutputFormat2.STORAGE_POLICY_PROPERTY, "ALL_SSD");
@ -1652,7 +1652,7 @@ public class TestHFileOutputFormat2 {
@Test
public void testMRIncrementalLoadWithLocalityMultiCluster() throws Exception {
// Start cluster A
util = new HBaseTestingUtility();
util = new HBaseTestingUtil();
Configuration confA = util.getConfiguration();
int hostCount = 3;
int regionNum = 20;
@ -1660,12 +1660,12 @@ public class TestHFileOutputFormat2 {
for (int i = 0; i < hostCount; ++i) {
hostnames[i] = "datanode_" + i;
}
StartMiniClusterOption option = StartMiniClusterOption.builder()
StartTestingClusterOption option = StartTestingClusterOption.builder()
.numRegionServers(hostCount).dataNodeHosts(hostnames).build();
util.startMiniCluster(option);
// Start cluster B
HBaseTestingUtility utilB = new HBaseTestingUtility();
HBaseTestingUtil utilB = new HBaseTestingUtil();
Configuration confB = utilB.getConfiguration();
utilB.startMiniCluster(option);

View File

@ -21,7 +21,7 @@ import static org.junit.Assert.assertEquals;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.testclassification.MapReduceTests;
@ -42,7 +42,7 @@ public class TestHRegionPartitioner {
public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(TestHRegionPartitioner.class);
private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
private static final HBaseTestingUtil UTIL = new HBaseTestingUtil();
@Rule
public TestName name = new TestName();

View File

@ -25,7 +25,7 @@ import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
@ -58,7 +58,7 @@ public class TestHashTable {
private static final Logger LOG = LoggerFactory.getLogger(TestHashTable.class);
private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private static final HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil();
@Rule
public TestName name = new TestName();

View File

@ -45,7 +45,7 @@ import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeepDeletedCells;
import org.apache.hadoop.hbase.KeyValue;
@ -118,7 +118,7 @@ public class TestImportExport {
HBaseClassTestRule.forClass(TestImportExport.class);
private static final Logger LOG = LoggerFactory.getLogger(TestImportExport.class);
protected static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
protected static final HBaseTestingUtil UTIL = new HBaseTestingUtil();
private static final byte[] ROW1 = Bytes.toBytesBinary("\\x32row1");
private static final byte[] ROW2 = Bytes.toBytesBinary("\\x32row2");
private static final byte[] ROW3 = Bytes.toBytesBinary("\\x32row3");

View File

@ -33,7 +33,7 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
@ -75,7 +75,7 @@ public class TestImportTSVWithOperationAttributes implements Configurable {
private static final Logger LOG =
LoggerFactory.getLogger(TestImportTSVWithOperationAttributes.class);
protected static final String NAME = TestImportTsv.class.getSimpleName();
protected static HBaseTestingUtility util = new HBaseTestingUtility();
protected static HBaseTestingUtil util = new HBaseTestingUtil();
/**
* Delete the tmp directory after running doMROnTableTest. Boolean. Default is
@ -163,7 +163,7 @@ public class TestImportTSVWithOperationAttributes implements Configurable {
* @param dataAvailable
* @return The Tool instance used to run the test.
*/
private Tool doMROnTableTest(HBaseTestingUtility util, String family, String data, String[] args,
private Tool doMROnTableTest(HBaseTestingUtil util, String family, String data, String[] args,
int valueMultiplier, boolean dataAvailable) throws Exception {
String table = args[args.length - 1];
Configuration conf = new Configuration(util.getConfiguration());

View File

@ -30,7 +30,7 @@ import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.Put;
@ -64,7 +64,7 @@ public class TestImportTSVWithTTLs implements Configurable {
protected static final Logger LOG = LoggerFactory.getLogger(TestImportTSVWithTTLs.class);
protected static final String NAME = TestImportTsv.class.getSimpleName();
protected static HBaseTestingUtility util = new HBaseTestingUtility();
protected static HBaseTestingUtil util = new HBaseTestingUtil();
/**
* Delete the tmp directory after running doMROnTableTest. Boolean. Default is
@ -124,7 +124,7 @@ public class TestImportTSVWithTTLs implements Configurable {
util.deleteTable(tableName);
}
protected static Tool doMROnTableTest(HBaseTestingUtility util, String family, String data,
protected static Tool doMROnTableTest(HBaseTestingUtil util, String family, String data,
String[] args, int valueMultiplier) throws Exception {
TableName table = TableName.valueOf(args[args.length - 1]);
Configuration conf = new Configuration(util.getConfiguration());

View File

@ -36,7 +36,7 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
@ -86,7 +86,7 @@ public class TestImportTSVWithVisibilityLabels implements Configurable {
private static final Logger LOG =
LoggerFactory.getLogger(TestImportTSVWithVisibilityLabels.class);
protected static final String NAME = TestImportTsv.class.getSimpleName();
protected static HBaseTestingUtility util = new HBaseTestingUtility();
protected static HBaseTestingUtil util = new HBaseTestingUtil();
/**
* Delete the tmp directory after running doMROnTableTest. Boolean. Default is
@ -320,7 +320,7 @@ public class TestImportTSVWithVisibilityLabels implements Configurable {
util.deleteTable(tableName);
}
protected static Tool doMROnTableTest(HBaseTestingUtility util, String family, String data,
protected static Tool doMROnTableTest(HBaseTestingUtil util, String family, String data,
String[] args, int valueMultiplier) throws Exception {
return doMROnTableTest(util, family, data, args, valueMultiplier, -1);
}
@ -338,7 +338,7 @@ public class TestImportTSVWithVisibilityLabels implements Configurable {
*
* @return The Tool instance used to run the test.
*/
protected static Tool doMROnTableTest(HBaseTestingUtility util, String family, String data,
protected static Tool doMROnTableTest(HBaseTestingUtil util, String family, String data,
String[] args, int valueMultiplier,int expectedKVCount) throws Exception {
TableName table = TableName.valueOf(args[args.length - 1]);
Configuration conf = new Configuration(util.getConfiguration());

View File

@ -38,7 +38,7 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TableNotFoundException;
@ -80,7 +80,7 @@ public class TestImportTsv implements Configurable {
private static final Logger LOG = LoggerFactory.getLogger(TestImportTsv.class);
protected static final String NAME = TestImportTsv.class.getSimpleName();
protected static HBaseTestingUtility util = new HBaseTestingUtility();
protected static HBaseTestingUtil util = new HBaseTestingUtil();
// Delete the tmp directory after running doMROnTableTest. Boolean. Default is true.
protected static final String DELETE_AFTER_LOAD_CONF = NAME + ".deleteAfterLoad";
@ -385,7 +385,7 @@ public class TestImportTsv implements Configurable {
return doMROnTableTest(util, tn, FAMILY, data, args, valueMultiplier,-1);
}
protected static Tool doMROnTableTest(HBaseTestingUtility util, TableName table,
protected static Tool doMROnTableTest(HBaseTestingUtil util, TableName table,
String family, String data, Map<String, String> args) throws Exception {
return doMROnTableTest(util, table, family, data, args, 1,-1);
}
@ -398,7 +398,7 @@ public class TestImportTsv implements Configurable {
* @param args Any arguments to pass BEFORE inputFile path is appended.
* @return The Tool instance used to run the test.
*/
protected static Tool doMROnTableTest(HBaseTestingUtility util, TableName table,
protected static Tool doMROnTableTest(HBaseTestingUtil util, TableName table,
String family, String data, Map<String, String> args, int valueMultiplier,int expectedKVCount)
throws Exception {
Configuration conf = new Configuration(util.getConfiguration());

View File

@ -30,7 +30,7 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Put;
@ -65,8 +65,8 @@ public class TestMultithreadedTableMapper {
HBaseClassTestRule.forClass(TestMultithreadedTableMapper.class);
private static final Logger LOG = LoggerFactory.getLogger(TestMultithreadedTableMapper.class);
private static final HBaseTestingUtility UTIL =
new HBaseTestingUtility();
private static final HBaseTestingUtil UTIL =
new HBaseTestingUtil();
static final TableName MULTI_REGION_TABLE_NAME = TableName.valueOf("mrtest");
static final byte[] INPUT_FAMILY = Bytes.toBytes("contents");
static final byte[] OUTPUT_FAMILY = Bytes.toBytes("text");

View File

@ -28,7 +28,7 @@ import java.util.ArrayList;
import java.util.Arrays;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
@ -58,7 +58,7 @@ public class TestRowCounter {
HBaseClassTestRule.forClass(TestRowCounter.class);
private static final Logger LOG = LoggerFactory.getLogger(TestRowCounter.class);
private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private final static HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil();
private final static String TABLE_NAME = "testRowCounter";
private final static String TABLE_NAME_TS_RANGE = "testRowCounter_ts_range";
private final static String COL_FAM = "col_fam";

View File

@ -21,7 +21,7 @@ import static org.junit.Assert.assertEquals;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.testclassification.MapReduceTests;
@ -41,7 +41,7 @@ public class TestSimpleTotalOrderPartitioner {
public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(TestSimpleTotalOrderPartitioner.class);
protected final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
protected final static HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil();
Configuration conf = TEST_UTIL.getConfiguration();
@Test

Some files were not shown because too many files have changed in this diff Show More