From 1004876bad1947e301bf8bdabf6979444aee7ee0 Mon Sep 17 00:00:00 2001 From: Andrew Purtell Date: Sat, 20 Aug 2022 23:29:20 +0800 Subject: [PATCH] HBASE-27252 Clean up error-prone findings in hbase-it Close #4662 Co-authored-by: Duo Zhang Signed-off-by: Duo Zhang Signed-off-by: Viraj Jasani --- .../apache/hadoop/hbase/chaos/ChaosAgent.java | 9 +- .../apache/hadoop/hbase/ChaosZKClient.java | 5 +- .../apache/hadoop/hbase/ClusterManager.java | 2 +- .../hadoop/hbase/DistributedHBaseCluster.java | 6 +- .../hadoop/hbase/HBaseClusterManager.java | 8 -- .../hbase/IntegrationTestBackupRestore.java | 11 +-- .../hadoop/hbase/IntegrationTestIngest.java | 7 +- .../hbase/IntegrationTestIngestWithMOB.java | 1 + .../hbase/IntegrationTestLazyCfLoading.java | 2 + .../hbase/IntegrationTestMobCompaction.java | 12 ++- .../IntegrationTestRegionReplicaPerf.java | 4 +- ...tegrationTestRegionReplicaReplication.java | 3 +- .../hbase/IntegrationTestingUtility.java | 7 +- .../hadoop/hbase/IntegrationTestsDriver.java | 2 +- ...tripeCompactionsPerformanceEvaluation.java | 13 ++- .../TestShellExecEndpointCoprocessor.java | 4 +- .../hadoop/hbase/chaos/actions/Action.java | 5 +- .../hbase/chaos/actions/AddCPULoadAction.java | 2 + .../actions/ChangeBloomFilterAction.java | 8 +- .../actions/CorruptPacketsCommandAction.java | 1 + .../actions/DelayPacketsCommandAction.java | 1 + .../DuplicatePacketsCommandAction.java | 1 + .../chaos/actions/FillDiskCommandAction.java | 1 + .../actions/LosePacketsCommandAction.java | 1 + .../actions/ReorderPacketsCommandAction.java | 1 + .../actions/RestartRandomDataNodeAction.java | 4 +- .../actions/RollingBatchRestartRsAction.java | 5 +- .../RollingBatchSuspendResumeRsAction.java | 6 +- .../actions/SplitAllRegionOfTableAction.java | 1 + .../UnbalanceKillAndRebalanceAction.java | 3 +- .../chaos/actions/UnbalanceRegionsAction.java | 3 +- ...gurableSlowDeterministicMonkeyFactory.java | 27 +++-- .../hbase/chaos/util/ChaosMonkeyRunner.java | 2 +- .../hbase/ipc/IntegrationTestRpcClient.java | 3 +- .../mapreduce/IntegrationTestBulkLoad.java | 2 +- .../mapreduce/IntegrationTestImportTsv.java | 12 ++- .../IntegrationTestTableMapReduceUtil.java | 1 + ...tegrationTestTableSnapshotInputFormat.java | 10 +- .../test/IntegrationTestBigLinkedList.java | 99 +++++++++---------- ...rationTestBigLinkedListWithVisibility.java | 20 ++-- .../test/IntegrationTestLoadAndVerify.java | 26 ++--- .../test/IntegrationTestLoadCommonCrawl.java | 11 ++- .../test/IntegrationTestReplication.java | 30 ++++-- ...TimeBoundedRequestsWithRegionReplicas.java | 5 +- ...onTestWithCellVisibilityLoadAndVerify.java | 14 ++- .../IntegrationTestSendTraceRequests.java | 53 +++++----- 46 files changed, 240 insertions(+), 214 deletions(-) diff --git a/hbase-it/src/main/java/org/apache/hadoop/hbase/chaos/ChaosAgent.java b/hbase-it/src/main/java/org/apache/hadoop/hbase/chaos/ChaosAgent.java index deeb7625130..197977d4f3a 100644 --- a/hbase-it/src/main/java/org/apache/hadoop/hbase/chaos/ChaosAgent.java +++ b/hbase-it/src/main/java/org/apache/hadoop/hbase/chaos/ChaosAgent.java @@ -19,6 +19,7 @@ package org.apache.hadoop.hbase.chaos; import java.io.Closeable; import java.io.IOException; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.List; import java.util.concurrent.atomic.AtomicBoolean; @@ -224,10 +225,11 @@ public class ChaosAgent implements Watcher, Closeable, Runnable { } catch (Exception e) { break; } - zk.getData(path, false, getTaskForExecutionCallback, new String(data)); + zk.getData(path, false, 
getTaskForExecutionCallback, + new String(data, StandardCharsets.UTF_8)); break; case OK: - String cmd = new String(data); + String cmd = new String(data, StandardCharsets.UTF_8); LOG.info("Executing command : " + cmd); String status = ChaosConstants.TASK_COMPLETION_STRING; try { @@ -368,7 +370,8 @@ public class ChaosAgent implements Watcher, Closeable, Runnable { */ public void setStatusOfTaskZNode(String taskZNode, String status) { LOG.info("Setting status of Task ZNode: " + taskZNode + " status : " + status); - zk.setData(taskZNode, status.getBytes(), -1, setStatusOfTaskZNodeCallback, null); + zk.setData(taskZNode, status.getBytes(StandardCharsets.UTF_8), -1, setStatusOfTaskZNodeCallback, + null); } /** diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/ChaosZKClient.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/ChaosZKClient.java index 356759bb155..9212dc29583 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/ChaosZKClient.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/ChaosZKClient.java @@ -18,6 +18,7 @@ package org.apache.hadoop.hbase; import java.io.IOException; +import java.nio.charset.StandardCharsets; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.Threads; import org.apache.yetus.audience.InterfaceAudience; @@ -111,7 +112,7 @@ public class ChaosZKClient { zk.create( CHAOS_AGENT_STATUS_ZNODE + ZNODE_PATH_SEPARATOR + taskObject.getTaskHostname() + ZNODE_PATH_SEPARATOR + TASK_PREFIX, - taskObject.getCommand().getBytes(), ZooDefs.Ids.OPEN_ACL_UNSAFE, + taskObject.getCommand().getBytes(StandardCharsets.UTF_8), ZooDefs.Ids.OPEN_ACL_UNSAFE, CreateMode.EPHEMERAL_SEQUENTIAL, submitTaskCallback, taskObject); long start = EnvironmentEdgeManager.currentTime(); @@ -189,7 +190,7 @@ public class ChaosZKClient { case OK: if (ctx != null) { - String status = new String(data); + String status = new String(data, StandardCharsets.UTF_8); taskStatus = status; switch (status) { case TASK_COMPLETION_STRING: diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/ClusterManager.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/ClusterManager.java index 5cc4b8eafba..4355b386adf 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/ClusterManager.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/ClusterManager.java @@ -40,7 +40,7 @@ interface ClusterManager extends Configurable { HBASE_MASTER("master"), HBASE_REGIONSERVER("regionserver"); - private String name; + private final String name; ServiceType(String name) { this.name = name; diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/DistributedHBaseCluster.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/DistributedHBaseCluster.java index 024f954aac5..ae7fef86500 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/DistributedHBaseCluster.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/DistributedHBaseCluster.java @@ -344,9 +344,9 @@ public class DistributedHBaseCluster extends HBaseClusterInterface { // do a best effort restore boolean success = true; - success = restoreMasters(initial, current) & success; - success = restoreRegionServers(initial, current) & success; - success = restoreAdmin() & success; + success = restoreMasters(initial, current) && success; + success = restoreRegionServers(initial, current) && success; + success = restoreAdmin() && success; LOG.info("Restoring cluster - done"); return success; diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/HBaseClusterManager.java 
b/hbase-it/src/test/java/org/apache/hadoop/hbase/HBaseClusterManager.java index d6abc353158..a09a690c89a 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/HBaseClusterManager.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/HBaseClusterManager.java @@ -280,22 +280,14 @@ public class HBaseClusterManager extends Configured implements ClusterManager { */ static class ZookeeperShellCommandProvider extends CommandProvider { private final String zookeeperHome; - private final String confDir; ZookeeperShellCommandProvider(Configuration conf) throws IOException { zookeeperHome = conf.get("hbase.it.clustermanager.zookeeper.home", System.getenv("ZOOBINDIR")); - String tmp = - conf.get("hbase.it.clustermanager.zookeeper.conf.dir", System.getenv("ZOOCFGDIR")); if (zookeeperHome == null) { throw new IOException("ZooKeeper home configuration parameter i.e. " + "'hbase.it.clustermanager.zookeeper.home' is not configured properly."); } - if (tmp != null) { - confDir = String.format("--config %s", tmp); - } else { - confDir = ""; - } } @Override diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBackupRestore.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBackupRestore.java index fd360943289..1a0446381ae 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBackupRestore.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBackupRestore.java @@ -244,14 +244,12 @@ public class IntegrationTestBackupRestore extends IntegrationTestBase { private void runTestSingle(TableName table) throws IOException { List backupIds = new ArrayList(); - List tableSizes = new ArrayList(); try (Connection conn = util.getConnection(); Admin admin = conn.getAdmin(); BackupAdmin client = new BackupAdminImpl(conn);) { // #0- insert some data to table 'table' loadData(table, rowsInIteration); - tableSizes.add(rowsInIteration); // #1 - create full backup for table first LOG.info("create full backup image for {}", table); @@ -270,7 +268,6 @@ public class IntegrationTestBackupRestore extends IntegrationTestBase { // Load data loadData(table, rowsInIteration); - tableSizes.add(rowsInIteration * count); // Do incremental backup builder = new BackupRequest.Builder(); request = builder.withBackupType(BackupType.INCREMENTAL).withTableList(tables) @@ -321,10 +318,7 @@ public class IntegrationTestBackupRestore extends IntegrationTestBase { return arr; } - /** - * @param backupId pass backup ID to check status of - * @return status of backup - */ + /** Returns status of backup */ protected boolean checkSucceeded(String backupId) throws IOException { BackupInfo status = getBackupInfo(backupId); if (status == null) { @@ -428,9 +422,6 @@ public class IntegrationTestBackupRestore extends IntegrationTestBase { .add(NUMBER_OF_TABLES_KEY, numTables).add(SLEEP_TIME_KEY, sleepTime).toString()); } - /** - * @param args argument list - */ public static void main(String[] args) throws Exception { Configuration conf = HBaseConfiguration.create(); IntegrationTestingUtility.setUseDistributedCluster(conf); diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngest.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngest.java index 41edaf547d1..dfd6483b2ef 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngest.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngest.java @@ -36,6 +36,7 @@ import org.junit.experimental.categories.Category; import org.slf4j.Logger; 
import org.slf4j.LoggerFactory; +import org.apache.hbase.thirdparty.com.google.common.base.Splitter; import org.apache.hbase.thirdparty.com.google.common.collect.Sets; /** @@ -63,7 +64,6 @@ public class IntegrationTestIngest extends IntegrationTestBase { // Log is being used in IntegrationTestIngestWithEncryption, hence it is protected protected static final Logger LOG = LoggerFactory.getLogger(IntegrationTestIngest.class); - protected IntegrationTestingUtility util; protected HBaseClusterInterface cluster; protected LoadTestTool loadTool; @@ -137,7 +137,7 @@ public class IntegrationTestIngest extends IntegrationTestBase { families.add(Bytes.toString(family)); } } else { - for (String family : familiesString.split(",")) { + for (String family : Splitter.on(',').split(familiesString)) { families.add(family); } } @@ -168,8 +168,7 @@ public class IntegrationTestIngest extends IntegrationTestBase { LOG.info("Intended run time: " + (runtime / 60000) + " min, left:" + ((runtime - (EnvironmentEdgeManager.currentTime() - start)) / 60000) + " min"); - int ret = -1; - ret = loadTool.run(getArgsForLoadTestTool("-write", + int ret = loadTool.run(getArgsForLoadTestTool("-write", String.format("%d:%d:%d", colsPerKey, recordSize, writeThreads), startKey, numKeys)); if (0 != ret) { String errorMsg = "Load failed with error code " + ret; diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngestWithMOB.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngestWithMOB.java index fe1ceafbc78..b82056c9621 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngestWithMOB.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngestWithMOB.java @@ -105,6 +105,7 @@ public class IntegrationTestIngestWithMOB extends IntegrationTestIngest { } @Test + @Override public void testIngest() throws Exception { runIngestTest(JUNIT_RUN_TIME, 100, 10, 1024, 10, 20); } diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestLazyCfLoading.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestLazyCfLoading.java index e77aaba503f..1e514b070dd 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestLazyCfLoading.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestLazyCfLoading.java @@ -58,7 +58,9 @@ import org.slf4j.LoggerFactory; public class IntegrationTestLazyCfLoading { private static final TableName TABLE_NAME = TableName.valueOf(IntegrationTestLazyCfLoading.class.getSimpleName()); + @SuppressWarnings("InlineFormatString") private static final String TIMEOUT_KEY = "hbase.%s.timeout"; + @SuppressWarnings("InlineFormatString") private static final String ENCODING_KEY = "hbase.%s.datablock.encoding"; /** A soft test timeout; duration of the test, as such, depends on number of keys to put. 
*/ diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestMobCompaction.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestMobCompaction.java index 0d9d9886db0..f54d815ad4d 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestMobCompaction.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestMobCompaction.java @@ -21,6 +21,7 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import java.io.IOException; +import java.nio.charset.StandardCharsets; import java.util.Arrays; import java.util.Set; import org.apache.hadoop.conf.Configuration; @@ -226,7 +227,7 @@ public class IntegrationTestMobCompaction extends IntegrationTestBase { } - class MajorCompaction implements Runnable { + static class MajorCompaction implements Runnable { @Override public void run() { @@ -242,7 +243,7 @@ public class IntegrationTestMobCompaction extends IntegrationTestBase { } } - class CleanMobAndArchive implements Runnable { + static class CleanMobAndArchive implements Runnable { @Override public void run() { @@ -257,7 +258,7 @@ public class IntegrationTestMobCompaction extends IntegrationTestBase { Thread.sleep(130000); } catch (Exception e) { - e.printStackTrace(); + LOG.warn("Exception in CleanMobAndArchive", e); } } } @@ -288,7 +289,8 @@ public class IntegrationTestMobCompaction extends IntegrationTestBase { try { Thread.sleep(500); } catch (InterruptedException ee) { - + // Restore interrupt status + Thread.currentThread().interrupt(); } } if (i % 100000 == 0) { @@ -323,7 +325,7 @@ public class IntegrationTestMobCompaction extends IntegrationTestBase { Thread.sleep(1000); } - getNumberOfMobFiles(conf, table.getName(), new String(fam)); + getNumberOfMobFiles(conf, table.getName(), new String(fam, StandardCharsets.UTF_8)); LOG.info("Waiting for write thread to finish ..."); writeData.join(); // Cleanup again diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestRegionReplicaPerf.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestRegionReplicaPerf.java index bccc23b4b0a..644d89a4e31 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestRegionReplicaPerf.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestRegionReplicaPerf.java @@ -23,9 +23,9 @@ import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import com.codahale.metrics.Histogram; +import java.util.ArrayDeque; import java.util.ArrayList; import java.util.Arrays; -import java.util.LinkedList; import java.util.List; import java.util.Queue; import java.util.Set; @@ -104,7 +104,7 @@ public class IntegrationTestRegionReplicaPerf extends IntegrationTestBase { * Wraps the invocation of {@link PerformanceEvaluation} in a {@code Callable}. 
*/ static class PerfEvalCallable implements Callable { - private final Queue argv = new LinkedList<>(); + private final Queue argv = new ArrayDeque<>(); private final Admin admin; public PerfEvalCallable(Admin admin, String argv) { diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestRegionReplicaReplication.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestRegionReplicaReplication.java index 4b5b3c18480..9462d0a47a9 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestRegionReplicaReplication.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestRegionReplicaReplication.java @@ -175,7 +175,6 @@ public class IntegrationTestRegionReplicaReplication extends IntegrationTestInge int verifyPercent = 100; int updatePercent = 20; - int ret = -1; int regionReplicaId = conf.getInt(String.format("%s.%s", TEST_NAME, LoadTestTool.OPT_REGION_REPLICA_ID), 1); @@ -191,7 +190,7 @@ public class IntegrationTestRegionReplicaReplication extends IntegrationTestInge args.add("-" + LoadTestTool.OPT_REGION_REPLICA_ID); args.add(String.valueOf(regionReplicaId)); - ret = loadTool.run(args.toArray(new String[args.size()])); + int ret = loadTool.run(args.toArray(new String[args.size()])); if (0 != ret) { String errorMsg = "Load failed with error code " + ret; LOG.error(errorMsg); diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestingUtility.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestingUtility.java index 7cdd46919f7..729110c04a0 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestingUtility.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestingUtility.java @@ -116,14 +116,13 @@ public class IntegrationTestingUtility extends HBaseTestingUtil { } /** - * @return whether we are interacting with a distributed cluster as opposed to and in-process mini - * cluster or a local cluster. + * Returns whether we are interacting with a distributed cluster as opposed to and in-process mini + * cluster or a local cluster. 
* @see IntegrationTestingUtility#setUseDistributedCluster(Configuration) */ public boolean isDistributedCluster() { Configuration conf = getConfiguration(); - boolean isDistributedCluster = false; - isDistributedCluster = + boolean isDistributedCluster = Boolean.parseBoolean(System.getProperty(IS_DISTRIBUTED_CLUSTER, "false")); if (!isDistributedCluster) { isDistributedCluster = conf.getBoolean(IS_DISTRIBUTED_CLUSTER, false); diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestsDriver.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestsDriver.java index 8e59ee855eb..3b590493a9f 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestsDriver.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestsDriver.java @@ -47,7 +47,7 @@ public class IntegrationTestsDriver extends AbstractHBaseTool { System.exit(ret); } - private class IntegrationTestFilter extends ClassTestFinder.TestClassFilter { + private static class IntegrationTestFilter extends ClassTestFinder.TestClassFilter { private Pattern testFilterRe = Pattern.compile(".*\\.IntegrationTest.*"); public IntegrationTestFilter() { diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/StripeCompactionsPerformanceEvaluation.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/StripeCompactionsPerformanceEvaluation.java index 16f1dc7dd71..d23255d7ee2 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/StripeCompactionsPerformanceEvaluation.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/StripeCompactionsPerformanceEvaluation.java @@ -18,6 +18,7 @@ package org.apache.hadoop.hbase; import java.io.IOException; +import java.util.List; import java.util.Locale; import java.util.Set; import org.apache.commons.lang3.StringUtils; @@ -43,6 +44,8 @@ import org.junit.Assert; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import org.apache.hbase.thirdparty.com.google.common.base.Splitter; +import org.apache.hbase.thirdparty.com.google.common.collect.Iterables; import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine; /** @@ -108,10 +111,12 @@ public class StripeCompactionsPerformanceEvaluation extends AbstractHBaseTool { int minValueSize = 0, maxValueSize = 0; String valueSize = cmd.getOptionValue(VALUE_SIZE_KEY, VALUE_SIZE_DEFAULT); if (valueSize.contains(":")) { - String[] valueSizes = valueSize.split(":"); - if (valueSize.length() != 2) throw new RuntimeException("Invalid value size: " + valueSize); - minValueSize = Integer.parseInt(valueSizes[0]); - maxValueSize = Integer.parseInt(valueSizes[1]); + List valueSizes = Splitter.on(':').splitToList(valueSize); + if (valueSizes.size() != 2) { + throw new RuntimeException("Invalid value size: " + valueSize); + } + minValueSize = Integer.parseInt(Iterables.get(valueSizes, 0)); + maxValueSize = Integer.parseInt(Iterables.get(valueSizes, 1)); } else { minValueSize = maxValueSize = Integer.parseInt(valueSize); } diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/TestShellExecEndpointCoprocessor.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/TestShellExecEndpointCoprocessor.java index a806f825e6c..4496318e0e7 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/TestShellExecEndpointCoprocessor.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/TestShellExecEndpointCoprocessor.java @@ -23,6 +23,7 @@ import static org.junit.Assert.assertTrue; import java.io.File; import java.io.IOException; +import java.nio.charset.StandardCharsets; import java.nio.file.Files; 
import java.nio.file.Path; import java.nio.file.Paths; @@ -106,7 +107,8 @@ public class TestShellExecEndpointCoprocessor { assertFalse("the response from a background task should have no stderr", resp.hasStderr()); Waiter.waitFor(conn.getConfiguration(), 5_000, () -> testFile.length() > 0); - final String content = new String(Files.readAllBytes(testFile.toPath())).trim(); + final String content = + new String(Files.readAllBytes(testFile.toPath()), StandardCharsets.UTF_8).trim(); assertEquals("hello world", content); } diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/Action.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/Action.java index 8fc2babe7b8..5d3d768b200 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/Action.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/Action.java @@ -21,7 +21,6 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Collection; import java.util.HashSet; -import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Properties; @@ -277,13 +276,13 @@ public abstract class Action { protected void unbalanceRegions(ClusterMetrics clusterStatus, List fromServers, List toServers, double fractionOfRegions) throws Exception { - List victimRegions = new LinkedList<>(); + List victimRegions = new ArrayList<>(); for (Map.Entry entry : clusterStatus.getLiveServerMetrics() .entrySet()) { ServerName sn = entry.getKey(); ServerMetrics serverLoad = entry.getValue(); // Ugh. - List regions = new LinkedList<>(serverLoad.getRegionMetrics().keySet()); + List regions = new ArrayList<>(serverLoad.getRegionMetrics().keySet()); int victimRegionCount = (int) Math.ceil(fractionOfRegions * regions.size()); getLogger().debug("Removing {} regions from {}", victimRegionCount, sn); Random rand = ThreadLocalRandom.current(); diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/AddCPULoadAction.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/AddCPULoadAction.java index 30bb5146f07..4ff5baf5a6e 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/AddCPULoadAction.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/AddCPULoadAction.java @@ -28,6 +28,7 @@ import org.slf4j.LoggerFactory; */ public class AddCPULoadAction extends SudoCommandAction { private static final Logger LOG = LoggerFactory.getLogger(AddCPULoadAction.class); + @SuppressWarnings("InlineFormatString") private static final String CPU_LOAD_COMMAND = "seq 1 %s | xargs -I{} -n 1 -P %s timeout %s dd if=/dev/urandom of=/dev/null bs=1M " + "iflag=fullblock"; @@ -51,6 +52,7 @@ public class AddCPULoadAction extends SudoCommandAction { return LOG; } + @Override protected void localPerform() throws IOException { getLogger().info("Starting to execute AddCPULoadAction"); ServerName server = PolicyBasedChaosMonkey.selectRandomItem(getCurrentServers()); diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/ChangeBloomFilterAction.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/ChangeBloomFilterAction.java index 3d7db596312..fdb0513cc7f 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/ChangeBloomFilterAction.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/ChangeBloomFilterAction.java @@ -28,16 +28,10 @@ import org.slf4j.LoggerFactory; * Action that tries to adjust the bloom filter setting on all the columns of a table */ public class 
ChangeBloomFilterAction extends Action { - private final long sleepTime; - private final TableName tableName; private static final Logger LOG = LoggerFactory.getLogger(ChangeBloomFilterAction.class); + private final TableName tableName; public ChangeBloomFilterAction(TableName tableName) { - this(-1, tableName); - } - - public ChangeBloomFilterAction(int sleepTime, TableName tableName) { - this.sleepTime = sleepTime; this.tableName = tableName; } diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/CorruptPacketsCommandAction.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/CorruptPacketsCommandAction.java index fe9a6f60159..8594ec5907d 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/CorruptPacketsCommandAction.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/CorruptPacketsCommandAction.java @@ -50,6 +50,7 @@ public class CorruptPacketsCommandAction extends TCCommandAction { return LOG; } + @Override protected void localPerform() throws IOException { getLogger().info("Starting to execute CorruptPacketsCommandAction"); ServerName server = PolicyBasedChaosMonkey.selectRandomItem(getCurrentServers()); diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/DelayPacketsCommandAction.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/DelayPacketsCommandAction.java index d92828ec612..4d00e5c5359 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/DelayPacketsCommandAction.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/DelayPacketsCommandAction.java @@ -50,6 +50,7 @@ public class DelayPacketsCommandAction extends TCCommandAction { return LOG; } + @Override protected void localPerform() throws IOException { getLogger().info("Starting to execute DelayPacketsCommandAction"); ServerName server = PolicyBasedChaosMonkey.selectRandomItem(getCurrentServers()); diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/DuplicatePacketsCommandAction.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/DuplicatePacketsCommandAction.java index 722ebc4b6c3..8d63055b6b1 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/DuplicatePacketsCommandAction.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/DuplicatePacketsCommandAction.java @@ -50,6 +50,7 @@ public class DuplicatePacketsCommandAction extends TCCommandAction { return LOG; } + @Override protected void localPerform() throws IOException { getLogger().info("Starting to execute DuplicatePacketsCommandAction"); ServerName server = PolicyBasedChaosMonkey.selectRandomItem(getCurrentServers()); diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/FillDiskCommandAction.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/FillDiskCommandAction.java index ea694f0ba9a..c5ec238e474 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/FillDiskCommandAction.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/FillDiskCommandAction.java @@ -54,6 +54,7 @@ public class FillDiskCommandAction extends SudoCommandAction { return LOG; } + @Override protected void localPerform() throws IOException { getLogger().info("Starting to execute FillDiskCommandAction"); ServerName server = PolicyBasedChaosMonkey.selectRandomItem(getCurrentServers()); diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/LosePacketsCommandAction.java 
b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/LosePacketsCommandAction.java index b550a5a657e..a01f380d04a 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/LosePacketsCommandAction.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/LosePacketsCommandAction.java @@ -50,6 +50,7 @@ public class LosePacketsCommandAction extends TCCommandAction { return LOG; } + @Override protected void localPerform() throws IOException { getLogger().info("Starting to execute LosePacketsCommandAction"); ServerName server = PolicyBasedChaosMonkey.selectRandomItem(getCurrentServers()); diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/ReorderPacketsCommandAction.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/ReorderPacketsCommandAction.java index bb4d87ca532..d75537fd447 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/ReorderPacketsCommandAction.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/ReorderPacketsCommandAction.java @@ -54,6 +54,7 @@ public class ReorderPacketsCommandAction extends TCCommandAction { return LOG; } + @Override protected void localPerform() throws IOException { getLogger().info("Starting to execute ReorderPacketsCommandAction"); ServerName server = PolicyBasedChaosMonkey.selectRandomItem(getCurrentServers()); diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/RestartRandomDataNodeAction.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/RestartRandomDataNodeAction.java index b039738e3d3..660d4c04277 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/RestartRandomDataNodeAction.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/RestartRandomDataNodeAction.java @@ -18,7 +18,7 @@ package org.apache.hadoop.hbase.chaos.actions; import java.io.IOException; -import java.util.LinkedList; +import java.util.ArrayList; import java.util.List; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.chaos.monkies.PolicyBasedChaosMonkey; @@ -56,7 +56,7 @@ public class RestartRandomDataNodeAction extends RestartActionBaseAction { DistributedFileSystem fs = (DistributedFileSystem) CommonFSUtils.getRootDir(getConf()).getFileSystem(getConf()); DFSClient dfsClient = fs.getClient(); - List hosts = new LinkedList<>(); + List hosts = new ArrayList<>(); for (DatanodeInfo dataNode : dfsClient.datanodeReport(HdfsConstants.DatanodeReportType.LIVE)) { hosts.add(ServerName.valueOf(dataNode.getHostName(), -1, -1)); } diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/RollingBatchRestartRsAction.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/RollingBatchRestartRsAction.java index 54183e042ae..cdff1aa3e42 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/RollingBatchRestartRsAction.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/RollingBatchRestartRsAction.java @@ -18,6 +18,7 @@ package org.apache.hadoop.hbase.chaos.actions; import java.io.IOException; +import java.util.ArrayDeque; import java.util.ArrayList; import java.util.Collections; import java.util.LinkedList; @@ -66,11 +67,13 @@ public class RollingBatchRestartRsAction extends BatchRestartRsAction { } @Override + // deadServers is both list and queue here, a valid use case for LinkedList + @SuppressWarnings("JdkObsolete") public void perform() throws Exception { getLogger().info("Performing action: Rolling batch 
restarting {}% of region servers", (int) (ratio * 100)); List selectedServers = selectServers(); - Queue serversToBeKilled = new LinkedList<>(selectedServers); + Queue serversToBeKilled = new ArrayDeque<>(selectedServers); LinkedList deadServers = new LinkedList<>(); Random rand = ThreadLocalRandom.current(); // loop while there are servers to be killed or dead servers to be restarted diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/RollingBatchSuspendResumeRsAction.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/RollingBatchSuspendResumeRsAction.java index b80fa2c06f6..07261bb58e0 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/RollingBatchSuspendResumeRsAction.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/RollingBatchSuspendResumeRsAction.java @@ -18,7 +18,7 @@ package org.apache.hadoop.hbase.chaos.actions; import java.io.IOException; -import java.util.LinkedList; +import java.util.ArrayDeque; import java.util.List; import java.util.Queue; import java.util.Random; @@ -68,8 +68,8 @@ public class RollingBatchSuspendResumeRsAction extends Action { getLogger().info("Performing action: Rolling batch restarting {}% of region servers", (int) (ratio * 100)); List selectedServers = selectServers(); - Queue serversToBeSuspended = new LinkedList<>(selectedServers); - Queue suspendedServers = new LinkedList<>(); + Queue serversToBeSuspended = new ArrayDeque<>(selectedServers); + Queue suspendedServers = new ArrayDeque<>(); Random rand = ThreadLocalRandom.current(); // loop while there are servers to be suspended or suspended servers to be resumed while ( diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/SplitAllRegionOfTableAction.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/SplitAllRegionOfTableAction.java index 06d39ab2a94..b986396b064 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/SplitAllRegionOfTableAction.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/SplitAllRegionOfTableAction.java @@ -38,6 +38,7 @@ public class SplitAllRegionOfTableAction extends Action { this.tableName = tableName; } + @Override public void init(ActionContext context) throws IOException { super.init(context); this.maxFullTableSplits = getConf().getInt(MAX_SPLIT_KEY, DEFAULT_MAX_SPLITS); diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/UnbalanceKillAndRebalanceAction.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/UnbalanceKillAndRebalanceAction.java index b9cf7986d8b..3a0f292e4a4 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/UnbalanceKillAndRebalanceAction.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/UnbalanceKillAndRebalanceAction.java @@ -19,7 +19,6 @@ package org.apache.hadoop.hbase.chaos.actions; import java.util.ArrayList; import java.util.HashSet; -import java.util.LinkedList; import java.util.List; import java.util.Random; import java.util.Set; @@ -66,7 +65,7 @@ public class UnbalanceKillAndRebalanceAction extends Action { @Override public void perform() throws Exception { ClusterMetrics status = this.cluster.getClusterMetrics(); - List victimServers = new LinkedList<>(status.getLiveServerMetrics().keySet()); + List victimServers = new ArrayList<>(status.getLiveServerMetrics().keySet()); Set killedServers = new HashSet<>(); int liveCount = (int) Math.ceil(FRC_SERVERS_THAT_HOARD_AND_LIVE * victimServers.size()); int deadCount 
= (int) Math.ceil(FRC_SERVERS_THAT_HOARD_AND_DIE * victimServers.size()); diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/UnbalanceRegionsAction.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/UnbalanceRegionsAction.java index 7fbe7f5bfdb..e0e7aaa8eb5 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/UnbalanceRegionsAction.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/UnbalanceRegionsAction.java @@ -18,7 +18,6 @@ package org.apache.hadoop.hbase.chaos.actions; import java.util.ArrayList; -import java.util.LinkedList; import java.util.List; import java.util.Random; import java.util.concurrent.ThreadLocalRandom; @@ -55,7 +54,7 @@ public class UnbalanceRegionsAction extends Action { public void perform() throws Exception { getLogger().info("Unbalancing regions"); ClusterMetrics status = this.cluster.getClusterMetrics(); - List victimServers = new LinkedList<>(status.getLiveServerMetrics().keySet()); + List victimServers = new ArrayList<>(status.getLiveServerMetrics().keySet()); int targetServerCount = (int) Math.ceil(fractionOfServers * victimServers.size()); List targetServers = new ArrayList<>(targetServerCount); Random rand = ThreadLocalRandom.current(); diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/factories/ConfigurableSlowDeterministicMonkeyFactory.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/factories/ConfigurableSlowDeterministicMonkeyFactory.java index f2f6775ef91..e71c0465629 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/factories/ConfigurableSlowDeterministicMonkeyFactory.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/factories/ConfigurableSlowDeterministicMonkeyFactory.java @@ -18,12 +18,16 @@ package org.apache.hadoop.hbase.chaos.factories; import java.lang.reflect.Constructor; +import java.util.List; import java.util.function.Function; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.chaos.actions.Action; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import org.apache.hbase.thirdparty.com.google.common.base.Splitter; +import org.apache.hbase.thirdparty.com.google.common.collect.Iterables; + public class ConfigurableSlowDeterministicMonkeyFactory extends SlowDeterministicMonkeyFactory { private static final Logger LOG = @@ -32,6 +36,7 @@ public class ConfigurableSlowDeterministicMonkeyFactory extends SlowDeterministi final static String HEAVY_ACTIONS = "heavy.actions"; final static String TABLE_PARAM = "\\$table_name"; + @SuppressWarnings("ImmutableEnumChecker") public enum SupportedTypes { FLOAT(p -> Float.parseFloat(p)), LONG(p -> Long.parseLong(p)), @@ -56,12 +61,13 @@ public class ConfigurableSlowDeterministicMonkeyFactory extends SlowDeterministi return super.getHeavyWeightedActions(); } else { try { - String[] actionClasses = actions.split(";"); - Action[] heavyActions = new Action[actionClasses.length]; - for (int i = 0; i < actionClasses.length; i++) { - heavyActions[i] = instantiateAction(actionClasses[i]); + List actionClasses = Splitter.on(';').splitToList(actions); + Action[] heavyActions = new Action[actionClasses.size()]; + int i = 0; + for (String action : actionClasses) { + heavyActions[i++] = instantiateAction(action); } - LOG.info("Created actions {}", heavyActions); + LOG.info("Created actions {}", (Object[]) heavyActions); // non-varargs call to LOG#info return heavyActions; } catch (Exception e) { LOG.error("Error trying to instantiate heavy actions. 
Returning null array.", e); @@ -72,10 +78,13 @@ public class ConfigurableSlowDeterministicMonkeyFactory extends SlowDeterministi private Action instantiateAction(String actionString) throws Exception { final String packageName = "org.apache.hadoop.hbase.chaos.actions"; - String[] classAndParams = actionString.split("\\)")[0].split("\\("); - String className = packageName + "." + classAndParams[0]; - String[] params = - classAndParams[1].replaceAll(TABLE_PARAM, tableName.getNameAsString()).split(","); + Iterable classAndParams = + Splitter.on('(').split(Iterables.get(Splitter.on(')').split(actionString), 0)); + String className = packageName + "." + Iterables.get(classAndParams, 0); + String[] params = Splitter.on(',') + .splitToStream( + Iterables.get(classAndParams, 1).replaceAll(TABLE_PARAM, tableName.getNameAsString())) + .toArray(String[]::new); LOG.info("About to instantiate action class: {}; With constructor params: {}", className, params); Class actionClass = (Class) Class.forName(className); diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/util/ChaosMonkeyRunner.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/util/ChaosMonkeyRunner.java index 71937d02661..4c2d6774685 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/util/ChaosMonkeyRunner.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/util/ChaosMonkeyRunner.java @@ -167,7 +167,7 @@ public class ChaosMonkeyRunner extends AbstractHBaseTool { return Sets.newHashSet(familyName); } - /* + /** * If caller wants to add config parameters from a file, the path to the conf file can be passed * like this: -c . The file is presumed to have the Configuration file xml format * and is added as a new Resource to the current Configuration. Use this mechanism to set diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/ipc/IntegrationTestRpcClient.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/ipc/IntegrationTestRpcClient.java index 4d05e04fa6e..b021de2d73e 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/ipc/IntegrationTestRpcClient.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/ipc/IntegrationTestRpcClient.java @@ -124,8 +124,8 @@ public class IntegrationTestRpcClient { } void stopRandomServer() throws Exception { - lock.writeLock().lock(); RpcServer rpcServer = null; + lock.writeLock().lock(); try { if (rpcServers.size() <= minServers) { return; @@ -243,6 +243,7 @@ public class IntegrationTestRpcClient { } @Override + @SuppressWarnings("AssertionFailureIgnored") // intended public void run() { while (running.get()) { boolean isBigPayload = ThreadLocalRandom.current().nextBoolean(); diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestBulkLoad.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestBulkLoad.java index 8427d3d6ea4..7223a1f753e 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestBulkLoad.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestBulkLoad.java @@ -365,7 +365,7 @@ public class IntegrationTestBulkLoad extends IntegrationTestBase { taskId = taskId + iteration * numMapTasks; numMapTasks = numMapTasks * numIterations; - long chainId = Math.abs(ThreadLocalRandom.current().nextLong()); + long chainId = ThreadLocalRandom.current().nextLong(Long.MAX_VALUE); chainId = chainId - (chainId % numMapTasks) + taskId; // ensure that chainId is unique per // task and across iterations LongWritable[] keys = new LongWritable[] { 
new LongWritable(chainId) }; diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestImportTsv.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestImportTsv.java index faca5892682..35db989dd69 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestImportTsv.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestImportTsv.java @@ -40,7 +40,6 @@ import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.IntegrationTestingUtility; import org.apache.hadoop.hbase.KeyValue; -import org.apache.hadoop.hbase.KeyValue.Type; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.Scan; @@ -60,6 +59,8 @@ import org.junit.rules.TestName; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import org.apache.hbase.thirdparty.com.google.common.base.Splitter; + /** * Validate ImportTsv + BulkLoadFiles on a distributed cluster. */ @@ -83,13 +84,13 @@ public class IntegrationTestImportTsv extends Configured implements Tool { private static final long serialVersionUID = 1L; { byte[] family = Bytes.toBytes("d"); - for (String line : simple_tsv.split("\n")) { + for (String line : Splitter.on('\n').split(simple_tsv)) { String[] row = line.split("\t"); byte[] key = Bytes.toBytes(row[0]); long ts = Long.parseLong(row[1]); byte[][] fields = { Bytes.toBytes(row[2]), Bytes.toBytes(row[3]) }; - add(new KeyValue(key, family, fields[0], ts, Type.Put, fields[0])); - add(new KeyValue(key, family, fields[1], ts, Type.Put, fields[1])); + add(new KeyValue(key, family, fields[0], ts, KeyValue.Type.Put, fields[0])); + add(new KeyValue(key, family, fields[1], ts, KeyValue.Type.Put, fields[1])); } } }; @@ -98,10 +99,12 @@ public class IntegrationTestImportTsv extends Configured implements Tool { // JUnit/Maven or by main when run from the CLI. 
protected static IntegrationTestingUtility util = null; + @Override public Configuration getConf() { return util.getConfiguration(); } + @Override public void setConf(Configuration conf) { LOG.debug("Ignoring setConf call."); } @@ -207,6 +210,7 @@ public class IntegrationTestImportTsv extends Configured implements Tool { LOG.info("testGenerateAndLoad completed successfully."); } + @Override public int run(String[] args) throws Exception { if (args.length != 0) { System.err.println(format("%s [genericOptions]", NAME)); diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestTableMapReduceUtil.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestTableMapReduceUtil.java index b965a1d06c0..5bdb8e6014a 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestTableMapReduceUtil.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestTableMapReduceUtil.java @@ -88,6 +88,7 @@ public class IntegrationTestTableMapReduceUtil implements Configurable, Tool { return 0; } + @Override public void setConf(Configuration conf) { if (util != null) { throw new IllegalArgumentException( diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestTableSnapshotInputFormat.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestTableSnapshotInputFormat.java index 9ea81546332..ec11284eb28 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestTableSnapshotInputFormat.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestTableSnapshotInputFormat.java @@ -41,11 +41,11 @@ import org.slf4j.LoggerFactory; * f2:(null) to be the the same as the row value. * *
- * aaa, f1: => aaa
- * aaa, f2: => aaa
- * aab, f1: => aab
+ * aaa, f1: => aaa
+ * aaa, f2: => aaa
+ * aab, f1: => aab
  * ....
- * zzz, f2: => zzz
+ * zzz, f2: => zzz
  * </pre>
* * Then the test creates a snapshot from this table, and overrides the values in the original table @@ -90,8 +90,6 @@ public class IntegrationTestTableSnapshotInputFormat extends IntegrationTestBase private static final byte[] MAPRED_START_ROW = Bytes.toBytes("aaa"); private static final byte[] MAPRED_END_ROW = Bytes.toBytes("zz{"); // 'z' + 1 => '{' - private IntegrationTestingUtility util; - @Override public void setConf(Configuration conf) { super.setConf(conf); diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java index f5312f00a51..068eb574659 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java @@ -115,6 +115,7 @@ import org.slf4j.LoggerFactory; import org.apache.hbase.thirdparty.com.google.common.base.Preconditions; import org.apache.hbase.thirdparty.com.google.common.collect.Sets; +import org.apache.hbase.thirdparty.com.google.common.util.concurrent.Uninterruptibles; import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine; import org.apache.hbase.thirdparty.org.apache.commons.cli.GnuParser; import org.apache.hbase.thirdparty.org.apache.commons.cli.HelpFormatter; @@ -289,7 +290,7 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase { */ public static final String BIG_FAMILY_VALUE_SIZE_KEY = "generator.big.family.value.size"; - public static enum Counts { + public static enum GeneratorCounts { SUCCESS, TERMINATING, UNDEFINED, @@ -448,6 +449,7 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase { volatile boolean walkersStop; int numWalkers; + final Object flushedLoopsLock = new Object(); volatile List flushedLoops = new ArrayList<>(); List walkers = new ArrayList<>(); @@ -550,9 +552,9 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase { } private void addFlushed(byte[] rowKey) { - synchronized (flushedLoops) { + synchronized (flushedLoopsLock) { flushedLoops.add(Bytes.toLong(rowKey)); - flushedLoops.notifyAll(); + flushedLoopsLock.notifyAll(); } } @@ -597,16 +599,12 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase { } private void joinWalkers() { - walkersStop = true; - synchronized (flushedLoops) { - flushedLoops.notifyAll(); + synchronized (flushedLoopsLock) { + walkersStop = true; + flushedLoopsLock.notifyAll(); } for (Thread walker : walkers) { - try { - walker.join(); - } catch (InterruptedException e) { - // no-op - } + Uninterruptibles.joinUninterruptibly(walker); } } @@ -635,7 +633,7 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase { try { walkLoop(node); } catch (IOException e) { - context.getCounter(Counts.IOEXCEPTION).increment(1l); + context.getCounter(GeneratorCounts.IOEXCEPTION).increment(1l); return; } } catch (InterruptedException e) { @@ -651,9 +649,9 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase { } private long selectLoop() throws InterruptedException { - synchronized (flushedLoops) { + synchronized (flushedLoopsLock) { while (flushedLoops.isEmpty() && !walkersStop) { - flushedLoops.wait(); + flushedLoopsLock.wait(); } if (walkersStop) { throw new InterruptedException(); @@ -691,19 +689,17 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase { while (numQueries < maxQueries) { numQueries++; byte[] prev = node.prev; - long t1 = 
EnvironmentEdgeManager.currentTime(); node = getNode(prev, table, node); - long t2 = EnvironmentEdgeManager.currentTime(); if (node == null) { LOG.error("ConcurrentWalker found UNDEFINED NODE: " + Bytes.toStringBinary(prev)); - context.getCounter(Counts.UNDEFINED).increment(1l); + context.getCounter(GeneratorCounts.UNDEFINED).increment(1l); } else if (node.prev.length == NO_KEY.length) { LOG.error( "ConcurrentWalker found TERMINATING NODE: " + Bytes.toStringBinary(node.key)); - context.getCounter(Counts.TERMINATING).increment(1l); + context.getCounter(GeneratorCounts.TERMINATING).increment(1l); } else { // Increment for successful walk - context.getCounter(Counts.SUCCESS).increment(1l); + context.getCounter(GeneratorCounts.SUCCESS).increment(1l); } } table.close(); @@ -875,14 +871,17 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase { } if ( - counters.findCounter(Counts.TERMINATING).getValue() > 0 - || counters.findCounter(Counts.UNDEFINED).getValue() > 0 - || counters.findCounter(Counts.IOEXCEPTION).getValue() > 0 + counters.findCounter(GeneratorCounts.TERMINATING).getValue() > 0 + || counters.findCounter(GeneratorCounts.UNDEFINED).getValue() > 0 + || counters.findCounter(GeneratorCounts.IOEXCEPTION).getValue() > 0 ) { LOG.error("Concurrent walker failed to verify during Generation phase"); - LOG.error("TERMINATING nodes: " + counters.findCounter(Counts.TERMINATING).getValue()); - LOG.error("UNDEFINED nodes: " + counters.findCounter(Counts.UNDEFINED).getValue()); - LOG.error("IOEXCEPTION nodes: " + counters.findCounter(Counts.IOEXCEPTION).getValue()); + LOG.error( + "TERMINATING nodes: " + counters.findCounter(GeneratorCounts.TERMINATING).getValue()); + LOG.error( + "UNDEFINED nodes: " + counters.findCounter(GeneratorCounts.UNDEFINED).getValue()); + LOG.error( + "IOEXCEPTION nodes: " + counters.findCounter(GeneratorCounts.IOEXCEPTION).getValue()); return false; } } catch (IOException e) { @@ -1033,16 +1032,17 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase { LocatedFileStatus keyFileStatus = iterator.next(); // Skip "_SUCCESS" file. if (keyFileStatus.getPath().getName().startsWith("_")) continue; - result.addAll(readFileToSearch(conf, fs, keyFileStatus)); + result.addAll(readFileToSearch(conf, keyFileStatus)); } } return result; } - private static SortedSet readFileToSearch(final Configuration conf, final FileSystem fs, + private static SortedSet readFileToSearch(final Configuration conf, final LocatedFileStatus keyFileStatus) throws IOException, InterruptedException { SortedSet result = new TreeSet<>(Bytes.BYTES_COMPARATOR); - // Return entries that are flagged Counts.UNDEFINED in the value. Return the row. This is + // Return entries that are flagged VerifyCounts.UNDEFINED in the value. Return the row. This + // is // what is missing. 
       TaskAttemptContext context = new TaskAttemptContextImpl(conf, new TaskAttemptID());
       try (SequenceFileAsBinaryInputFormat.SequenceFileAsBinaryRecordReader rr =
@@ -1053,7 +1053,7 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase {
         while (rr.nextKeyValue()) {
           rr.getCurrentKey();
           BytesWritable bw = rr.getCurrentValue();
-          if (Verify.VerifyReducer.whichType(bw.getBytes()) == Verify.Counts.UNDEFINED) {
+          if (Verify.VerifyReducer.whichType(bw.getBytes()) == Verify.VerifyCounts.UNDEFINED) {
             byte[] key = new byte[rr.getCurrentKey().getLength()];
             System.arraycopy(rr.getCurrentKey().getBytes(), 0, key, 0,
               rr.getCurrentKey().getLength());
@@ -1114,7 +1114,7 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase {
    * Don't change the order of these enums. Their ordinals are used as type flag when we emit
    * problems found from the reducer.
    */
-  public static enum Counts {
+  public static enum VerifyCounts {
     UNREFERENCED,
     UNDEFINED,
     REFERENCED,
@@ -1133,9 +1133,9 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase {
     extends Reducer {
     private ArrayList refs = new ArrayList<>();
     private final BytesWritable UNREF =
-      new BytesWritable(addPrefixFlag(Counts.UNREFERENCED.ordinal(), new byte[] {}));
+      new BytesWritable(addPrefixFlag(VerifyCounts.UNREFERENCED.ordinal(), new byte[] {}));
     private final BytesWritable LOSTFAM =
-      new BytesWritable(addPrefixFlag(Counts.LOST_FAMILIES.ordinal(), new byte[] {}));
+      new BytesWritable(addPrefixFlag(VerifyCounts.LOST_FAMILIES.ordinal(), new byte[] {}));
     private AtomicInteger rows = new AtomicInteger(0);
     private Connection connection;
@@ -1177,9 +1177,9 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase {
      * n * @return Type from the Counts enum of this row. Reads prefix added by
      * {@link #addPrefixFlag(int, byte[])}
      */
-    public static Counts whichType(final byte[] bs) {
+    public static VerifyCounts whichType(final byte[] bs) {
       int ordinal = Bytes.toShort(bs, 0, Bytes.SIZEOF_SHORT);
-      return Counts.values()[ordinal];
+      return VerifyCounts.values()[ordinal];
     }

     /**
@@ -1221,7 +1221,7 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase {
       if (lostFamilies) {
         String keyString = Bytes.toStringBinary(key.getBytes(), 0, key.getLength());
         LOG.error("LinkedListError: key=" + keyString + ", lost big or tiny families");
-        context.getCounter(Counts.LOST_FAMILIES).increment(1);
+        context.getCounter(VerifyCounts.LOST_FAMILIES).increment(1);
         context.write(key, LOSTFAM);
       }
@@ -1233,11 +1233,11 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase {
           byte[] bs = refs.get(i);
           int ordinal;
           if (i <= 0) {
-            ordinal = Counts.UNDEFINED.ordinal();
+            ordinal = VerifyCounts.UNDEFINED.ordinal();
             context.write(key, new BytesWritable(addPrefixFlag(ordinal, bs)));
-            context.getCounter(Counts.UNDEFINED).increment(1);
+            context.getCounter(VerifyCounts.UNDEFINED).increment(1);
           } else {
-            ordinal = Counts.EXTRA_UNDEF_REFERENCES.ordinal();
+            ordinal = VerifyCounts.EXTRA_UNDEF_REFERENCES.ordinal();
             context.write(key, new BytesWritable(addPrefixFlag(ordinal, bs)));
           }
         }
@@ -1252,7 +1252,7 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase {
       } else if (defCount > 0 && refs.isEmpty()) {
         // node is defined but not referenced
         context.write(key, UNREF);
-        context.getCounter(Counts.UNREFERENCED).increment(1);
+        context.getCounter(VerifyCounts.UNREFERENCED).increment(1);
         if (rows.addAndGet(1) < MISSING_ROWS_TO_LOG) {
           String keyString = Bytes.toStringBinary(key.getBytes(), 0, key.getLength());
           context.getCounter("unref", keyString).increment(1);
@@ -1261,13 +1261,13 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase {
         if (refs.size() > 1) {
           // Skip first reference.
           for (int i = 1; i < refs.size(); i++) {
-            context.write(key,
-              new BytesWritable(addPrefixFlag(Counts.EXTRAREFERENCES.ordinal(), refs.get(i))));
+            context.write(key, new BytesWritable(
+              addPrefixFlag(VerifyCounts.EXTRAREFERENCES.ordinal(), refs.get(i))));
           }
-          context.getCounter(Counts.EXTRAREFERENCES).increment(refs.size() - 1);
+          context.getCounter(VerifyCounts.EXTRAREFERENCES).increment(refs.size() - 1);
         }
         // node is defined and referenced
-        context.getCounter(Counts.REFERENCED).increment(1);
+        context.getCounter(VerifyCounts.REFERENCED).increment(1);
       }
     }
@@ -1275,6 +1275,7 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase {
      * Dump out extra info around references if there are any. Helps debugging.
      * @return StringBuilder filled with references if any. n
      */
+    @SuppressWarnings("JavaUtilDate")
     private StringBuilder dumpExtraInfoOnRefs(final BytesWritable key, final Context context,
       final List refs) throws IOException {
       StringBuilder refsSb = null;
@@ -1429,8 +1430,8 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase {
    * @return True if the values match what's expected, false otherwise
    */
   protected boolean verifyExpectedValues(long expectedReferenced, Counters counters) {
-    final Counter referenced = counters.findCounter(Counts.REFERENCED);
-    final Counter unreferenced = counters.findCounter(Counts.UNREFERENCED);
+    final Counter referenced = counters.findCounter(VerifyCounts.REFERENCED);
+    final Counter unreferenced = counters.findCounter(VerifyCounts.UNREFERENCED);
     boolean success = true;

     if (expectedReferenced != referenced.getValue()) {
@@ -1440,7 +1441,7 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase {
     }

     if (unreferenced.getValue() > 0) {
-      final Counter multiref = counters.findCounter(Counts.EXTRAREFERENCES);
+      final Counter multiref = counters.findCounter(VerifyCounts.EXTRAREFERENCES);
       boolean couldBeMultiRef = (multiref.getValue() == unreferenced.getValue());
       LOG.error(
         "Unreferenced nodes were not expected. Unreferenced count=" + unreferenced.getValue()
@@ -1457,8 +1458,8 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase {
    * @return True if the "bad" counter objects are 0, false otherwise
    */
   protected boolean verifyUnexpectedValues(Counters counters) {
-    final Counter undefined = counters.findCounter(Counts.UNDEFINED);
-    final Counter lostfamilies = counters.findCounter(Counts.LOST_FAMILIES);
+    final Counter undefined = counters.findCounter(VerifyCounts.UNDEFINED);
+    final Counter lostfamilies = counters.findCounter(VerifyCounts.LOST_FAMILIES);
     boolean success = true;

     if (undefined.getValue() > 0) {
@@ -1839,8 +1840,6 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase {
     return node;
   }

-  protected IntegrationTestingUtility util;
-
   @Override
   public void setUpCluster() throws Exception {
     util = getTestingUtil(getConf());
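[Review note, not part of the patch] The Counts -> VerifyCounts change above is a pure rename of the Verify job's counter enum; the two-byte ordinal prefix that VerifyReducer.whichType() decodes is untouched. A minimal standalone sketch of that encoding, assuming the short prefix written by addPrefixFlag(); the wrapper class below is illustrative, not HBase code:

  import org.apache.hadoop.hbase.util.Bytes;

  public class TypeFlagSketch {
    // Order matters: ordinals are used as the on-disk type flag.
    enum VerifyCounts { UNREFERENCED, UNDEFINED, REFERENCED, EXTRAREFERENCES, EXTRA_UNDEF_REFERENCES, LOST_FAMILIES }

    // Prepend the enum ordinal as a short so each emitted value is self-describing.
    static byte[] addPrefixFlag(int ordinal, byte[] r) {
      byte[] prefix = Bytes.toBytes((short) ordinal);
      return Bytes.add(prefix, r);
    }

    // Read the prefix back; this mirrors what whichType() does in the patched code.
    static VerifyCounts whichType(byte[] bs) {
      int ordinal = Bytes.toShort(bs, 0, Bytes.SIZEOF_SHORT);
      return VerifyCounts.values()[ordinal];
    }
  }
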
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedListWithVisibility.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedListWithVisibility.java
index 199ecf81b23..a25a1e05909 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedListWithVisibility.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedListWithVisibility.java
@@ -71,6 +71,7 @@ import org.junit.experimental.categories.Category;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

+import org.apache.hbase.thirdparty.com.google.common.base.Splitter;
 import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine;

 /**
@@ -204,7 +205,7 @@ public class IntegrationTestBigLinkedListWithVisibility extends IntegrationTestB
     protected void persist(org.apache.hadoop.mapreduce.Mapper.Context output, long count,
       byte[][] prev, byte[][] current, byte[] id) throws IOException {
       String visibilityExps = "";
-      String[] split = labels.split(COMMA);
+      String[] split = Splitter.on(COMMA).splitToStream(labels).toArray(String[]::new);
       for (int i = 0; i < current.length; i++) {
         for (int j = 0; j < DEFAULT_TABLES_COUNT; j++) {
           Put put = new Put(current[i]);
@@ -248,18 +249,16 @@ public class IntegrationTestBigLinkedListWithVisibility extends IntegrationTestB
     }

     public int runCopier(String outputDir) throws Exception {
-      Job job = null;
-      Scan scan = null;
-      job = new Job(getConf());
+      Job job = new Job(getConf());
       job.setJobName("Data copier");
       job.getConfiguration().setInt("INDEX", labelIndex);
       job.getConfiguration().set("LABELS", labels);
       job.setJarByClass(getClass());
-      scan = new Scan();
+      Scan scan = new Scan();
       scan.setCacheBlocks(false);
       scan.setRaw(true);
-      String[] split = labels.split(COMMA);
+      String[] split = Splitter.on(COMMA).splitToStream(labels).toArray(String[]::new);
       scan.setAuthorizations(
         new Authorizations(split[this.labelIndex * 2], split[(this.labelIndex * 2) + 1]));
       if (delete) {
@@ -424,7 +423,7 @@ public class IntegrationTestBigLinkedListWithVisibility extends IntegrationTestB
       scan.addColumn(FAMILY_NAME, COLUMN_PREV);
       scan.setCaching(10000);
       scan.setCacheBlocks(false);
-      String[] split = labels.split(COMMA);
+      String[] split = Splitter.on(COMMA).splitToStream(labels).toArray(String[]::new);
       scan.setAuthorizations(
         new Authorizations(split[this.labelIndex * 2], split[(this.labelIndex * 2) + 1]));

@@ -470,7 +469,6 @@ public class IntegrationTestBigLinkedListWithVisibility extends IntegrationTestB
   static class VisibilityLoop extends Loop {
     private static final int SLEEP_IN_MS = 5000;
     private static final Logger LOG = LoggerFactory.getLogger(VisibilityLoop.class);
-    IntegrationTestBigLinkedListWithVisibility it;

     @Override
     protected void runGenerator(int numMappers, long numNodes, String outputDir, Integer width,
@@ -652,10 +650,6 @@ public class IntegrationTestBigLinkedListWithVisibility extends IntegrationTestB

   @Override
   public int runTestFromCommandLine() throws Exception {
-    Tool tool = null;
-    Loop loop = new VisibilityLoop();
-    loop.it = this;
-    tool = loop;
-    return ToolRunner.run(getConf(), tool, otherArgs);
+    return ToolRunner.run(getConf(), new VisibilityLoop(), otherArgs);
   }
 }
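[Review note, not part of the patch] The labels.split(COMMA) -> Splitter rewrites in this file follow the same pattern applied throughout the patch: the separator is taken literally instead of being compiled as a regex, and trailing empty strings are not silently dropped. A small self-contained comparison, assuming the shaded Guava Splitter bundled in hbase-thirdparty; the sample labels string is made up:

  import org.apache.hbase.thirdparty.com.google.common.base.Splitter;

  public class SplitterSketch {
    public static void main(String[] args) {
      String labels = "public,private,confidential,secret";
      // Old, error-prone style: split(String) treats the argument as a regex and drops
      // trailing empty strings.
      String[] viaRegex = labels.split(",");
      // Patched style: a literal separator with predictable handling of empty pieces.
      String[] viaSplitter = Splitter.on(',').splitToStream(labels).toArray(String[]::new);
      System.out.println(viaRegex.length + " " + viaSplitter.length); // both 4 for this input
    }
  }
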
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadAndVerify.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadAndVerify.java
index 33cf6992573..5566bd79cab 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadAndVerify.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadAndVerify.java
@@ -26,6 +26,8 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.io.InputStreamReader;
 import java.io.InterruptedIOException;
+import java.nio.charset.StandardCharsets;
+import java.util.List;
 import java.util.Random;
 import java.util.Set;
 import java.util.SortedSet;
@@ -85,6 +87,8 @@ import org.junit.experimental.categories.Category;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

+import org.apache.hbase.thirdparty.com.google.common.base.Splitter;
+import org.apache.hbase.thirdparty.com.google.common.collect.Iterables;
 import org.apache.hbase.thirdparty.com.google.common.collect.Sets;
 import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine;

@@ -441,34 +445,32 @@ public class IntegrationTestLoadAndVerify extends IntegrationTestBase {
     }
     if (!fs.isDirectory(keysInputDir)) {
       FileStatus keyFileStatus = fs.getFileStatus(keysInputDir);
-      readFileToSearch(conf, fs, keyFileStatus, result);
+      readFileToSearch(fs, keyFileStatus, result);
     } else {
       RemoteIterator iterator = fs.listFiles(keysInputDir, false);
       while (iterator.hasNext()) {
         LocatedFileStatus keyFileStatus = iterator.next();
         // Skip "_SUCCESS" file.
         if (keyFileStatus.getPath().getName().startsWith("_")) continue;
-        readFileToSearch(conf, fs, keyFileStatus, result);
+        readFileToSearch(fs, keyFileStatus, result);
       }
     }
     return result;
   }

-  private static SortedSet readFileToSearch(final Configuration conf, final FileSystem fs,
+  private static SortedSet readFileToSearch(final FileSystem fs,
     final FileStatus keyFileStatus, SortedSet result) throws IOException, InterruptedException {
     // verify uses file output format and writes . We can read it as a text file
-    try (InputStream in = fs.open(keyFileStatus.getPath());
-      BufferedReader reader = new BufferedReader(new InputStreamReader(in))) {
+    try (InputStream in = fs.open(keyFileStatus.getPath()); BufferedReader reader =
+      new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8))) {
       // extract out the key and return that missing as a missing key
       String line;
       while ((line = reader.readLine()) != null) {
         if (line.isEmpty()) continue;
-
-        String[] parts = line.split("\\s+");
-        if (parts.length >= 1) {
-          String key = parts[0];
-          result.add(Bytes.toBytesBinary(key));
+        List parts = Splitter.onPattern("\\s+").splitToList(line);
+        if (parts.size() >= 1) {
+          result.add(Bytes.toBytesBinary(Iterables.get(parts, 0)));
         } else {
           LOG.info("Cannot parse key from: " + line);
         }
@@ -477,7 +479,7 @@ public class IntegrationTestLoadAndVerify extends IntegrationTestBase {
     return result;
   }

-  private int doSearch(Configuration conf, String keysDir) throws Exception {
+  private int doSearch(String keysDir) throws Exception {
     Path inputDir = new Path(keysDir);

     getConf().set(SEARCHER_INPUTDIR_KEY, inputDir.toString());
@@ -618,7 +620,7 @@ public class IntegrationTestLoadAndVerify extends IntegrationTestBase {
       }
     }
     if (doSearch) {
-      return doSearch(getConf(), keysDir);
+      return doSearch(keysDir);
     }
     return 0;
   }
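[Review note, not part of the patch] The reader change above passes an explicit charset instead of relying on the JVM's platform default (the error-prone DefaultCharset finding). The same pattern reduced to a sketch; the countLines() helper and its arguments are hypothetical:

  import java.io.BufferedReader;
  import java.io.IOException;
  import java.io.InputStreamReader;
  import java.nio.charset.StandardCharsets;
  import org.apache.hadoop.fs.FileSystem;
  import org.apache.hadoop.fs.Path;

  public class Utf8ReaderSketch {
    static long countLines(FileSystem fs, Path file) throws IOException {
      long lines = 0;
      // Naming the charset keeps behavior identical across hosts whose default encodings differ.
      try (BufferedReader reader =
          new BufferedReader(new InputStreamReader(fs.open(file), StandardCharsets.UTF_8))) {
        while (reader.readLine() != null) {
          lines++;
        }
      }
      return lines;
    }
  }
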
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadCommonCrawl.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadCommonCrawl.java
index 52a88743f55..fab08dbb2ec 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadCommonCrawl.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadCommonCrawl.java
@@ -98,6 +98,7 @@ import org.apache.hadoop.util.ToolRunner;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

+import org.apache.hbase.thirdparty.com.google.common.base.Splitter;
 import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine;

 /**
@@ -220,12 +221,11 @@ public class IntegrationTestLoadCommonCrawl extends IntegrationTestBase {
         LOG.error("Loader failed");
         return -1;
       }
-      res = runVerify(outputDir);
+      return runVerify(outputDir);
     } catch (Exception e) {
       LOG.error("Tool failed with exception", e);
       return -1;
     }
-    return 0;
   }

   @Override
@@ -527,9 +527,9 @@ public class IntegrationTestLoadCommonCrawl extends IntegrationTestBase {
       try (FSDataInputStream is = fs.open(warcFileInput)) {
         InputStreamReader reader;
         if (warcFileInput.getName().toLowerCase().endsWith(".gz")) {
-          reader = new InputStreamReader(new GZIPInputStream(is));
+          reader = new InputStreamReader(new GZIPInputStream(is), StandardCharsets.UTF_8);
         } else {
-          reader = new InputStreamReader(is);
+          reader = new InputStreamReader(is, StandardCharsets.UTF_8);
         }
         try (BufferedReader br = new BufferedReader(reader)) {
           String line;
@@ -949,8 +949,9 @@ public class IntegrationTestLoadCommonCrawl extends IntegrationTestBase {
     // Reverse the components of the hostname
     String reversedHost;
     if (uri.getHost() != null) {
+      final String[] hostComponents =
+        Splitter.on('.').splitToStream(uri.getHost()).toArray(String[]::new);
       final StringBuilder sb = new StringBuilder();
-      final String[] hostComponents = uri.getHost().split("\\.");
       for (int i = hostComponents.length - 1; i >= 0; i--) {
         sb.append(hostComponents[i]);
         if (i != 0) {
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestReplication.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestReplication.java
index 7c1446dc07f..7bf4b4a95af 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestReplication.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestReplication.java
@@ -19,6 +19,7 @@ package org.apache.hadoop.hbase.test;

 import java.util.Collections;
 import java.util.HashMap;
+import java.util.Iterator;
 import java.util.List;
 import java.util.Set;
 import java.util.TreeSet;
@@ -43,6 +44,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

 import org.apache.hbase.thirdparty.com.google.common.base.Joiner;
+import org.apache.hbase.thirdparty.com.google.common.base.Splitter;
 import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine;

 /**
@@ -109,7 +111,7 @@ public class IntegrationTestReplication extends IntegrationTestBigLinkedList {
    * Wrapper around an HBase ClusterID allowing us to get admin connections and configurations for
    * it
    */
-  protected class ClusterID {
+  protected static class ClusterID {
     private final Configuration configuration;
     private Connection connection = null;
@@ -121,10 +123,10 @@ public class IntegrationTestReplication extends IntegrationTestBigLinkedList {
      */
     public ClusterID(Configuration base, String key) {
       configuration = new Configuration(base);
-      String[] parts = key.split(":");
-      configuration.set(HConstants.ZOOKEEPER_QUORUM, parts[0]);
-      configuration.set(HConstants.ZOOKEEPER_CLIENT_PORT, parts[1]);
-      configuration.set(HConstants.ZOOKEEPER_ZNODE_PARENT, parts[2]);
+      Iterator iter = Splitter.on(':').split(key).iterator();
+      configuration.set(HConstants.ZOOKEEPER_QUORUM, iter.next());
+      configuration.set(HConstants.ZOOKEEPER_CLIENT_PORT, iter.next());
+      configuration.set(HConstants.ZOOKEEPER_ZNODE_PARENT, iter.next());
     }

     @Override
@@ -150,8 +152,20 @@ public class IntegrationTestReplication extends IntegrationTestBigLinkedList {
       this.connection = null;
     }

-    public boolean equals(ClusterID other) {
-      return this.toString().equalsIgnoreCase(other.toString());
+    @Override
+    public boolean equals(Object other) {
+      if (this == other) {
+        return true;
+      }
+      if (!(other instanceof ClusterID)) {
+        return false;
+      }
+      return toString().equalsIgnoreCase(other.toString());
+    }
+
+    @Override
+    public int hashCode() {
+      return toString().hashCode();
     }
   }

@@ -306,7 +320,7 @@ public class IntegrationTestReplication extends IntegrationTestBigLinkedList {
     if (!noReplicationSetup) {
       setupTablesAndReplication();
     }
-    int expectedNumNodes = 0;
+    long expectedNumNodes = 0;
     for (int i = 0; i < numIterations; i++) {
       LOG.info("Starting iteration = " + i);
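[Review note, not part of the patch] The ClusterID change above replaces an equals(ClusterID) overload with a real equals(Object)/hashCode() override pair. A short illustration of why the override matters for hash-based collections; the Id class below is illustrative only and not taken from the patch:

  import java.util.HashSet;
  import java.util.Locale;
  import java.util.Set;

  public final class OverloadVsOverride {
    static final class Id {
      private final String value;

      Id(String value) {
        this.value = value;
      }

      @Override
      public boolean equals(Object other) {
        // Overriding equals(Object) is what HashSet/HashMap actually invoke; a mere
        // equals(Id) overload would never be called by collections.
        return other instanceof Id && value.equalsIgnoreCase(((Id) other).value);
      }

      @Override
      public int hashCode() {
        // Must agree with the case-insensitive equals above.
        return value.toLowerCase(Locale.ROOT).hashCode();
      }
    }

    public static void main(String[] args) {
      Set<Id> ids = new HashSet<>();
      ids.add(new Id("zk1:2181:/hbase"));
      System.out.println(ids.contains(new Id("ZK1:2181:/hbase"))); // true with the override
    }
  }
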
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestTimeBoundedRequestsWithRegionReplicas.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestTimeBoundedRequestsWithRegionReplicas.java
index 657e44fefe6..e21f1d5e54e 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestTimeBoundedRequestsWithRegionReplicas.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestTimeBoundedRequestsWithRegionReplicas.java
@@ -267,7 +267,8 @@ public class IntegrationTestTimeBoundedRequestsWithRegionReplicas extends Integr
       try {
         this.timeoutThread.join();
       } catch (InterruptedException e) {
-        e.printStackTrace();
+        // Restore interrupt status
+        Thread.currentThread().interrupt();
       }
       this.aborted = true;
       super.waitForFinish();
@@ -293,7 +294,7 @@ public class IntegrationTestTimeBoundedRequestsWithRegionReplicas extends Integr
       return new TimeBoundedMultiThreadedReaderThread(readerId);
     }

-    private class TimeoutThread extends Thread {
+    private static class TimeoutThread extends Thread {
       long timeout;
       long reportInterval = 60000;
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestWithCellVisibilityLoadAndVerify.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestWithCellVisibilityLoadAndVerify.java
index 859e81759ee..db9a1c40d74 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestWithCellVisibilityLoadAndVerify.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestWithCellVisibilityLoadAndVerify.java
@@ -59,6 +59,8 @@ import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
 import org.apache.hadoop.util.ToolRunner;
 import org.junit.experimental.categories.Category;

+import org.apache.hbase.thirdparty.com.google.common.base.Splitter;
+import org.apache.hbase.thirdparty.com.google.common.collect.Iterables;
 import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine;

 /**
@@ -119,14 +121,16 @@ public class IntegrationTestWithCellVisibilityLoadAndVerify extends IntegrationT
     conf.set("hbase.superuser", User.getCurrent().getName());
     conf.setBoolean("dfs.permissions", false);
     super.setUpCluster();
-    String[] users = userNames.split(",");
-    if (users.length != 2) {
+    List users = Splitter.on(',').splitToList(userNames);
+    if (users.size() != 2) {
       System.err.println(ERROR_STR);
       throw new IOException(ERROR_STR);
     }
-    System.out.println(userNames + " " + users[0] + " " + users[1]);
-    USER1 = User.createUserForTesting(conf, users[0], new String[] {});
-    USER2 = User.createUserForTesting(conf, users[1], new String[] {});
+    String user1 = Iterables.get(users, 0);
+    String user2 = Iterables.get(users, 1);
+    System.out.println(userNames + " " + user1 + " " + user2);
+    USER1 = User.createUserForTesting(conf, user1, new String[] {});
+    USER2 = User.createUserForTesting(conf, user2, new String[] {});
     addLabelsAndAuths();
   }
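[Review note, not part of the patch] The InterruptedException handling in IntegrationTestTimeBoundedRequestsWithRegionReplicas above restores the interrupt flag rather than printing the stack trace, so callers can still observe the interrupt. The same pattern on a generic worker thread, as a sketch:

  public class InterruptSketch {
    public static void waitForWorker(Thread worker) {
      try {
        worker.join();
      } catch (InterruptedException e) {
        // Swallowing or merely logging the exception hides the interrupt from the caller;
        // re-setting the flag lets the enclosing code shut down cleanly.
        Thread.currentThread().interrupt();
      }
    }
  }
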
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/trace/IntegrationTestSendTraceRequests.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/trace/IntegrationTestSendTraceRequests.java
index 49ac9f1a239..481dd5ee7e8 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/trace/IntegrationTestSendTraceRequests.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/trace/IntegrationTestSendTraceRequests.java
@@ -28,6 +28,7 @@ import java.util.concurrent.Executors;
 import java.util.concurrent.LinkedBlockingQueue;
 import java.util.concurrent.ThreadLocalRandom;
 import java.util.concurrent.TimeUnit;
+import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.IntegrationTestingUtility;
@@ -137,11 +138,9 @@ public class IntegrationTestSendTraceRequests extends AbstractHBaseTool {
           ht.close();
           ht = null;
-        } catch (IOException e) {
-          e.printStackTrace();
+        } catch (Exception e) {
           span.addEvent("exception",
             Attributes.of(AttributeKey.stringKey("exception"), e.getClass().getSimpleName()));
-        } catch (Exception e) {
         } finally {
           span.end();
           if (rs != null) {
@@ -162,36 +161,36 @@ public class IntegrationTestSendTraceRequests extends AbstractHBaseTool {
       @Override
       public void run() {
-        Table ht = null;
         try {
           ht = util.getConnection().getTable(tableName);
+          long accum = 0;
+          for (int x = 0; x < 5; x++) {
+            Span span = TraceUtil.getGlobalTracer().spanBuilder("gets").startSpan();
+            try (Scope scope = span.makeCurrent()) {
+              long rk = rowKeyQueue.take();
+              Result r1 = ht.get(new Get(Bytes.toBytes(rk)));
+              if (r1 != null) {
+                accum |= Bytes.toLong(r1.getRow());
+              }
+              Result r2 = ht.get(new Get(Bytes.toBytes(rk)));
+              if (r2 != null) {
+                accum |= Bytes.toLong(r2.getRow());
+              }
+              span.addEvent("Accum = " + accum);
+            } catch (IOException | InterruptedException ie) {
+              // IGNORED
+            } finally {
+              span.end();
+            }
+          }
         } catch (IOException e) {
-          e.printStackTrace();
-        }
-
-        long accum = 0;
-        for (int x = 0; x < 5; x++) {
-          Span span = TraceUtil.getGlobalTracer().spanBuilder("gets").startSpan();
-          try (Scope scope = span.makeCurrent()) {
-            long rk = rowKeyQueue.take();
-            Result r1 = ht.get(new Get(Bytes.toBytes(rk)));
-            if (r1 != null) {
-              accum |= Bytes.toLong(r1.getRow());
-            }
-            Result r2 = ht.get(new Get(Bytes.toBytes(rk)));
-            if (r2 != null) {
-              accum |= Bytes.toLong(r2.getRow());
-            }
-            span.addEvent("Accum = " + accum);
-
-          } catch (IOException | InterruptedException ie) {
-            // IGNORED
-          } finally {
-            span.end();
+          // IGNORED
+        } finally {
+          if (ht != null) {
+            IOUtils.closeQuietly(ht);
           }
         }
-      }
     };
     service.execute(runnable);
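[Review note, not part of the patch] The IntegrationTestSendTraceRequests change above moves the per-thread work inside the try block and closes the Table via IOUtils.closeQuietly() in a finally, so a failed get never leaks the table handle. A stripped-down sketch of that shape, with a hypothetical Closeable resource standing in for the Table:

  import java.io.Closeable;
  import java.io.IOException;
  import org.apache.commons.io.IOUtils;

  public class CloseQuietlySketch {
    interface Resource extends Closeable {
      void use() throws IOException;
    }

    static void runOnce(Resource resource) {
      try {
        resource.use();
      } catch (IOException e) {
        // IGNORED: best-effort work, mirroring the integration test's tolerance for failures.
      } finally {
        // closeQuietly() suppresses close() exceptions so cleanup never masks the original error.
        IOUtils.closeQuietly(resource);
      }
    }
  }
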