diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java
index 74f653c0daa..385eb714013 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java
@@ -23,6 +23,7 @@ import java.io.IOException;
 import java.util.Arrays;
 import java.util.List;
 import java.util.UUID;
+import java.util.concurrent.ThreadLocalRandom;
 
 import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.conf.Configuration;
@@ -144,9 +145,15 @@ public class HBaseCommonTestingUtility {
    * @see #getBaseTestDir()
    */
   public Path getRandomDir() {
-    return new Path(getBaseTestDir(), UUID.randomUUID().toString());
+    return new Path(getBaseTestDir(), getRandomUUID().toString());
   }
 
+  public UUID getRandomUUID() {
+    return new UUID(ThreadLocalRandom.current().nextLong(),
+        ThreadLocalRandom.current().nextLong());
+  }
+
+
   protected void createSubDir(String propertyName, Path parent, String subDirName) {
     Path newPath = new Path(parent, subDirName);
     File newDir = new File(newPath.toString()).getAbsoluteFile();
diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSyncUpToolWithBulkLoadedData.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSyncUpToolWithBulkLoadedData.java
index e248e9af975..96010d92ecc 100644
--- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSyncUpToolWithBulkLoadedData.java
+++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSyncUpToolWithBulkLoadedData.java
@@ -26,7 +26,6 @@ import java.util.HashSet;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Set;
-import java.util.UUID;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
@@ -85,7 +84,7 @@ public class TestReplicationSyncUpToolWithBulkLoadedData extends TestReplication
     Iterator<String> randomHFileRangeListIterator = null;
     Set<String> randomHFileRanges = new HashSet<>(16);
     for (int i = 0; i < 16; i++) {
-      randomHFileRanges.add(UUID.randomUUID().toString());
+      randomHFileRanges.add(utility1.getRandomUUID().toString());
     }
     List<String> randomHFileRangeList = new ArrayList<>(randomHFileRanges);
     Collections.sort(randomHFileRangeList);
diff --git a/hbase-hadoop-compat/src/test/java/org/apache/hadoop/hbase/RandomStringGeneratorImpl.java b/hbase-hadoop-compat/src/test/java/org/apache/hadoop/hbase/RandomStringGeneratorImpl.java
index 8e3b71d0773..91cd19ef009 100644
--- a/hbase-hadoop-compat/src/test/java/org/apache/hadoop/hbase/RandomStringGeneratorImpl.java
+++ b/hbase-hadoop-compat/src/test/java/org/apache/hadoop/hbase/RandomStringGeneratorImpl.java
@@ -20,13 +20,15 @@ package org.apache.hadoop.hbase;
 
 import java.util.UUID;
+import java.util.concurrent.ThreadLocalRandom;
 
 public class RandomStringGeneratorImpl implements RandomStringGenerator {
 
   private final String s;
 
   public RandomStringGeneratorImpl() {
-    s = UUID.randomUUID().toString();
+    s = new UUID(ThreadLocalRandom.current().nextLong(),
+        ThreadLocalRandom.current().nextLong()).toString();
   }
 
   @Override
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestReplication.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestReplication.java
index 1b5c9a40e45..1796db74948 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestReplication.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestReplication.java
@@ -264,7 +264,7 @@ public class IntegrationTestReplication extends IntegrationTestBigLinkedList {
    */
   protected void runGenerator() throws Exception {
     Path outputPath = new Path(outputDir);
-    UUID uuid = UUID.randomUUID(); //create a random UUID.
+    UUID uuid = util.getRandomUUID(); //create a random UUID.
     Path generatorOutput = new Path(outputPath, uuid.toString());
 
     Generator generator = new Generator();
@@ -288,7 +288,7 @@ public class IntegrationTestReplication extends IntegrationTestBigLinkedList {
    */
   protected void runVerify(long expectedNumNodes) throws Exception {
     Path outputPath = new Path(outputDir);
-    UUID uuid = UUID.randomUUID(); //create a random UUID.
+    UUID uuid = util.getRandomUUID(); //create a random UUID.
     Path iterationOutput = new Path(outputPath, uuid.toString());
 
     Verify verify = new Verify();
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithOperationAttributes.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithOperationAttributes.java
index 9951e64ddfd..83a457ff071 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithOperationAttributes.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithOperationAttributes.java
@@ -25,7 +25,6 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 import java.util.Optional;
-import java.util.UUID;
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
@@ -123,7 +122,7 @@ public class TestImportTSVWithOperationAttributes implements Configurable {
 
   @Test
   public void testMROnTable() throws Exception {
-    final TableName tableName = TableName.valueOf(name.getMethodName() + UUID.randomUUID());
+    final TableName tableName = TableName.valueOf(name.getMethodName() + util.getRandomUUID());
 
     // Prepare the arguments required for the test.
     String[] args = new String[] {
@@ -139,7 +138,7 @@ public class TestImportTSVWithOperationAttributes implements Configurable {
 
   @Test
   public void testMROnTableWithInvalidOperationAttr() throws Exception {
-    final TableName tableName = TableName.valueOf(name.getMethodName() + UUID.randomUUID());
+    final TableName tableName = TableName.valueOf(name.getMethodName() + util.getRandomUUID());
 
     // Prepare the arguments required for the test.
     String[] args = new String[] {
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithTTLs.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithTTLs.java
index aaa495efcc4..b8b6bb81155 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithTTLs.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithTTLs.java
@@ -24,7 +24,6 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 import java.util.Optional;
-import java.util.UUID;
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
@@ -111,7 +110,7 @@ public class TestImportTSVWithTTLs implements Configurable {
 
   @Test
   public void testMROnTable() throws Exception {
-    final TableName tableName = TableName.valueOf(name.getMethodName() + UUID.randomUUID());
+    final TableName tableName = TableName.valueOf(name.getMethodName() + util.getRandomUUID());
 
     // Prepare the arguments required for the test.
     String[] args = new String[] {
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithVisibilityLabels.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithVisibilityLabels.java
index 2e5f3be21b0..7f4ad9e34ae 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithVisibilityLabels.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithVisibilityLabels.java
@@ -27,7 +27,6 @@ import java.util.Arrays;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
-import java.util.UUID;
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
@@ -161,7 +160,7 @@ public class TestImportTSVWithVisibilityLabels implements Configurable {
 
   @Test
   public void testMROnTable() throws Exception {
-    final TableName tableName = TableName.valueOf(name.getMethodName() + UUID.randomUUID());
+    final TableName tableName = TableName.valueOf(name.getMethodName() + util.getRandomUUID());
 
     // Prepare the arguments required for the test.
     String[] args = new String[] {
@@ -177,7 +176,7 @@ public class TestImportTSVWithVisibilityLabels implements Configurable {
 
   @Test
   public void testMROnTableWithDeletes() throws Exception {
-    final TableName tableName = TableName.valueOf(name.getMethodName() + UUID.randomUUID());
+    final TableName tableName = TableName.valueOf(name.getMethodName() + util.getRandomUUID());
 
     // Prepare the arguments required for the test.
     String[] args = new String[] {
@@ -229,7 +228,7 @@ public class TestImportTSVWithVisibilityLabels implements Configurable {
 
   @Test
   public void testMROnTableWithBulkload() throws Exception {
-    final TableName tableName = TableName.valueOf(name.getMethodName() + UUID.randomUUID());
+    final TableName tableName = TableName.valueOf(name.getMethodName() + util.getRandomUUID());
     Path hfiles = new Path(util.getDataTestDirOnTestFS(tableName.getNameAsString()), "hfiles");
     // Prepare the arguments required for the test.
     String[] args = new String[] {
@@ -245,7 +244,7 @@ public class TestImportTSVWithVisibilityLabels implements Configurable {
 
   @Test
   public void testBulkOutputWithTsvImporterTextMapper() throws Exception {
-    final TableName table = TableName.valueOf(name.getMethodName() + UUID.randomUUID());
+    final TableName table = TableName.valueOf(name.getMethodName() + util.getRandomUUID());
     String FAMILY = "FAM";
     Path bulkOutputPath = new Path(util.getDataTestDirOnTestFS(table.getNameAsString()),"hfiles");
     // Prepare the arguments required for the test.
@@ -266,7 +265,7 @@ public class TestImportTSVWithVisibilityLabels implements Configurable {
 
   @Test
   public void testMRWithOutputFormat() throws Exception {
-    final TableName tableName = TableName.valueOf(name.getMethodName() + UUID.randomUUID());
+    final TableName tableName = TableName.valueOf(name.getMethodName() + util.getRandomUUID());
     Path hfiles = new Path(util.getDataTestDirOnTestFS(tableName.getNameAsString()), "hfiles");
     // Prepare the arguments required for the test.
     String[] args = new String[] {
@@ -283,7 +282,7 @@ public class TestImportTSVWithVisibilityLabels implements Configurable {
 
   @Test
   public void testBulkOutputWithInvalidLabels() throws Exception {
-    final TableName tableName = TableName.valueOf(name.getMethodName() + UUID.randomUUID());
+    final TableName tableName = TableName.valueOf(name.getMethodName() + util.getRandomUUID());
     Path hfiles = new Path(util.getDataTestDirOnTestFS(tableName.getNameAsString()), "hfiles");
     // Prepare the arguments required for the test.
     String[] args =
@@ -301,7 +300,7 @@ public class TestImportTSVWithVisibilityLabels implements Configurable {
 
   @Test
   public void testBulkOutputWithTsvImporterTextMapperWithInvalidLabels() throws Exception {
-    final TableName tableName = TableName.valueOf(name.getMethodName() + UUID.randomUUID());
+    final TableName tableName = TableName.valueOf(name.getMethodName() + util.getRandomUUID());
     Path hfiles = new Path(util.getDataTestDirOnTestFS(tableName.getNameAsString()), "hfiles");
     // Prepare the arguments required for the test.
     String[] args =
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java
index 68c6b6b4bca..74fdc99a740 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java
@@ -29,7 +29,6 @@ import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
-import java.util.UUID;
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
@@ -118,7 +117,7 @@ public class TestImportTsv implements Configurable {
 
   @Before
   public void setup() throws Exception {
-    tn = TableName.valueOf("test-" + UUID.randomUUID());
+    tn = TableName.valueOf("test-" + util.getRandomUUID());
     args = new HashMap<>();
     // Prepare the arguments required for the test.
     args.put(ImportTsv.COLUMNS_CONF_KEY, "HBASE_ROW_KEY,FAM:A,FAM:B");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
index 7cc933325e3..c99d9191b4d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
@@ -49,7 +49,6 @@ import java.util.Properties;
 import java.util.Random;
 import java.util.Set;
 import java.util.TreeSet;
-import java.util.UUID;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicReference;
 import java.util.stream.Collectors;
@@ -551,7 +550,7 @@ public class HBaseTestingUtility extends HBaseZKTestingUtility {
     //the working directory, and create a unique sub dir there
     FileSystem fs = getTestFileSystem();
     Path newDataTestDir;
-    String randomStr = UUID.randomUUID().toString();
+    String randomStr = getRandomUUID().toString();
     if (fs.getUri().getScheme().equals(FileSystem.getLocal(conf).getUri().getScheme())) {
       newDataTestDir = new Path(getDataTestDir(), randomStr);
       File dataTestDir = new File(newDataTestDir.toString());
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHBaseOnOtherDfsCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHBaseOnOtherDfsCluster.java
index 9234ea68010..531ba079c4f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHBaseOnOtherDfsCluster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHBaseOnOtherDfsCluster.java
@@ -20,7 +20,6 @@ package org.apache.hadoop.hbase;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 
-import java.util.UUID;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.client.Put;
@@ -68,7 +67,7 @@ public class TestHBaseOnOtherDfsCluster {
     targetFs = FileSystem.get(util2.getConfiguration());
     assertFsSameUri(fs, targetFs);
 
-    Path randomFile = new Path("/"+UUID.randomUUID());
+    Path randomFile = new Path("/"+util1.getRandomUUID());
     assertTrue(targetFs.createNewFile(randomFile));
     assertTrue(fs.exists(randomFile));
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestNodeHealthCheckChore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestNodeHealthCheckChore.java
index c3a6f0c4b70..1603d3ac83a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestNodeHealthCheckChore.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestNodeHealthCheckChore.java
@@ -25,7 +25,6 @@ import java.io.File;
 import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.PrintWriter;
-import java.util.UUID;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -144,7 +143,7 @@ public class TestNodeHealthCheckChore {
         throw new IOException("Failed mkdirs " + tempDir);
       }
     }
-    String scriptName = "HealthScript" + UUID.randomUUID().toString()
+    String scriptName = "HealthScript" + UTIL.getRandomUUID().toString()
        + (Shell.WINDOWS ? ".cmd" : ".sh");
     healthScriptFile = new File(tempDir.getAbsolutePath(), scriptName);
     conf.set(HConstants.HEALTH_SCRIPT_LOC, healthScriptFile.getAbsolutePath());
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
index 100df38a441..13fa59f2a51 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
@@ -38,7 +38,6 @@ import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
 import java.util.NavigableMap;
-import java.util.UUID;
 import java.util.concurrent.Callable;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
@@ -357,9 +356,9 @@ public class TestFromClientSide {
     Table ht = TEST_UTIL.createTable(tableName, FAMILIES);
     String value = "this is the value";
     String value2 = "this is some other value";
-    String keyPrefix1 = UUID.randomUUID().toString();
-    String keyPrefix2 = UUID.randomUUID().toString();
-    String keyPrefix3 = UUID.randomUUID().toString();
+    String keyPrefix1 = TEST_UTIL.getRandomUUID().toString();
+    String keyPrefix2 = TEST_UTIL.getRandomUUID().toString();
+    String keyPrefix3 = TEST_UTIL.getRandomUUID().toString();
     putRows(ht, 3, value, keyPrefix1);
     putRows(ht, 3, value, keyPrefix2);
     putRows(ht, 3, value, keyPrefix3);
@@ -449,7 +448,7 @@ public class TestFromClientSide {
   private void putRows(Table ht, int numRows, String value, String key)
       throws IOException {
     for (int i = 0; i < numRows; i++) {
-      String row = key + "_" + UUID.randomUUID().toString();
+      String row = key + "_" + TEST_UTIL.getRandomUUID().toString();
       System.out.println(String.format("Saving row: %s, with value %s", row,
           value));
       Put put = new Put(Bytes.toBytes(row));
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotWithAcl.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotWithAcl.java
index ce0db30e58f..67de106817e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotWithAcl.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotWithAcl.java
@@ -18,7 +18,6 @@
 package org.apache.hadoop.hbase.client;
 
 import java.io.IOException;
-import java.util.UUID;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Coprocessor;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
@@ -52,7 +51,7 @@ public class TestSnapshotWithAcl extends SecureTestUtil {
   public static final HBaseClassTestRule CLASS_RULE =
      HBaseClassTestRule.forClass(TestSnapshotWithAcl.class);
 
-  public TableName TEST_TABLE = TableName.valueOf(UUID.randomUUID().toString());
+  public TableName TEST_TABLE = TableName.valueOf(TEST_UTIL.getRandomUUID().toString());
 
   private static final int ROW_COUNT = 30000;
 
@@ -197,11 +196,11 @@ public class TestSnapshotWithAcl extends SecureTestUtil {
     loadData();
     verifyRows(TEST_TABLE);
 
-    String snapshotName1 = UUID.randomUUID().toString();
+    String snapshotName1 = TEST_UTIL.getRandomUUID().toString();
     admin.snapshot(snapshotName1, TEST_TABLE);
 
     // clone snapshot with restoreAcl true.
-    TableName tableName1 = TableName.valueOf(UUID.randomUUID().toString());
+    TableName tableName1 = TableName.valueOf(TEST_UTIL.getRandomUUID().toString());
     admin.cloneSnapshot(snapshotName1, tableName1, true);
     verifyRows(tableName1);
     verifyAllowed(new AccessReadAction(tableName1), USER_OWNER, USER_RO, USER_RW);
@@ -210,7 +209,7 @@
     verifyDenied(new AccessWriteAction(tableName1), USER_RO, USER_NONE);
 
     // clone snapshot with restoreAcl false.
-    TableName tableName2 = TableName.valueOf(UUID.randomUUID().toString());
+    TableName tableName2 = TableName.valueOf(TEST_UTIL.getRandomUUID().toString());
     admin.cloneSnapshot(snapshotName1, tableName2, false);
     verifyRows(tableName2);
     verifyAllowed(new AccessReadAction(tableName2), USER_OWNER);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/replication/TestReplicationAdminWithClusters.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/replication/TestReplicationAdminWithClusters.java
index 268fe0015b5..e5743a886e3 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/replication/TestReplicationAdminWithClusters.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/replication/TestReplicationAdminWithClusters.java
@@ -299,7 +299,7 @@ public class TestReplicationAdminWithClusters extends TestReplicationBase {
 
     @Override
     public UUID getPeerUUID() {
-      return UUID.randomUUID();
+      return utility1.getRandomUUID();
     }
 
     @Override
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileEncryption.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileEncryption.java
index 23fe9054141..5eeecefa487 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileEncryption.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileEncryption.java
@@ -28,7 +28,6 @@ import java.io.DataOutputStream;
 import java.io.IOException;
 import java.security.SecureRandom;
 import java.util.List;
-import java.util.UUID;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
@@ -221,7 +220,8 @@ public class TestHFileEncryption {
         .build();
     // write a new test HFile
     LOG.info("Writing with " + fileContext);
-    Path path = new Path(TEST_UTIL.getDataTestDir(), UUID.randomUUID().toString() + ".hfile");
+    Path path = new Path(TEST_UTIL.getDataTestDir(),
+        TEST_UTIL.getRandomUUID().toString() + ".hfile");
     FSDataOutputStream out = fs.create(path);
     HFile.Writer writer = HFile.getWriterFactory(conf, cacheConf)
         .withOutputStream(out)
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSplitLogManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSplitLogManager.java
index 917a7713363..e0577f6a2d6 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSplitLogManager.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSplitLogManager.java
@@ -36,7 +36,6 @@ import static org.junit.Assert.assertTrue;
 
 import java.io.IOException;
 import java.util.Map;
-import java.util.UUID;
 import java.util.concurrent.atomic.LongAdder;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
@@ -124,7 +123,7 @@ public class TestSplitLogManager {
     conf = TEST_UTIL.getConfiguration();
     // Use a different ZK wrapper instance for each tests.
     zkw =
-        new ZKWatcher(conf, "split-log-manager-tests" + UUID.randomUUID().toString(), null);
+        new ZKWatcher(conf, "split-log-manager-tests" + TEST_UTIL.getRandomUUID().toString(), null);
     master = new DummyMasterServices(zkw, conf);
 
     ZKUtil.deleteChildrenRecursively(zkw, zkw.getZNodePaths().baseZNode);
@@ -523,7 +522,7 @@ public class TestSplitLogManager {
     Path logDirPath = new Path(new Path(dir, HConstants.HREGION_LOGDIR_NAME), serverName);
     fs.mkdirs(logDirPath);
     // create an empty log file
-    String logFile = new Path(logDirPath, UUID.randomUUID().toString()).toString();
+    String logFile = new Path(logDirPath, TEST_UTIL.getRandomUUID().toString()).toString();
     fs.create(new Path(logDirPath, logFile)).close();
 
     // spin up a thread mocking split done.
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotFileCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotFileCache.java
index 3eff88a57bb..60422a0cb6e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotFileCache.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotFileCache.java
@@ -29,7 +29,6 @@ import java.util.Arrays;
 import java.util.Collection;
 import java.util.HashSet;
 import java.util.List;
-import java.util.UUID;
 import java.util.concurrent.atomic.AtomicInteger;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
@@ -179,7 +178,7 @@ public class TestSnapshotFileCache {
 
 
     // add a random file to make sure we refresh
-    FileStatus randomFile = mockStoreFile(UUID.randomUUID().toString());
+    FileStatus randomFile = mockStoreFile(UTIL.getRandomUUID().toString());
     allStoreFiles.add(randomFile);
     deletableFiles = cache.getUnreferencedFiles(allStoreFiles, null);
     assertEquals(randomFile, Iterables.getOnlyElement(deletableFiles));
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/TestMobFileName.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/TestMobFileName.java
index 22a47ba9f8e..ae53ff21feb 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/TestMobFileName.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/TestMobFileName.java
@@ -24,8 +24,8 @@ import static org.junit.Assert.assertTrue;
 
 import java.util.Date;
 import java.util.Random;
-import java.util.UUID;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.MD5Hash;
@@ -41,6 +41,8 @@ public class TestMobFileName {
   public static final HBaseClassTestRule CLASS_RULE =
      HBaseClassTestRule.forClass(TestMobFileName.class);
 
+  private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
+
   private String uuid;
   private Date date;
   private String dateStr;
@@ -49,7 +51,7 @@ public class TestMobFileName {
   @Before
   public void setUp() {
     Random random = new Random();
-    uuid = UUID.randomUUID().toString().replaceAll("-", "");
+    uuid = TEST_UTIL.getRandomUUID().toString().replaceAll("-", "");
     date = new Date();
     dateStr = MobUtils.formatDate(date);
     startKey = Bytes.toBytes(random.nextInt());
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/compactions/TestPartitionedMobCompactor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/compactions/TestPartitionedMobCompactor.java
index 94c35f485a5..2cf741ed9bd 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/compactions/TestPartitionedMobCompactor.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/compactions/TestPartitionedMobCompactor.java
@@ -31,7 +31,6 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Random;
-import java.util.UUID;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.RejectedExecutionException;
 import java.util.concurrent.RejectedExecutionHandler;
@@ -136,8 +135,8 @@ public class TestPartitionedMobCompactor {
     Path testDir = FSUtils.getRootDir(conf);
     Path mobTestDir = new Path(testDir, MobConstants.MOB_DIR_NAME);
     basePath = new Path(new Path(mobTestDir, tableName), family);
-    mobSuffix = UUID.randomUUID().toString().replaceAll("-", "");
-    delSuffix = UUID.randomUUID().toString().replaceAll("-", "") + "_del";
+    mobSuffix = TEST_UTIL.getRandomUUID().toString().replaceAll("-", "");
+    delSuffix = TEST_UTIL.getRandomUUID().toString().replaceAll("-", "") + "_del";
     allFiles.clear();
     mobFiles.clear();
     delFiles.clear();
@@ -832,8 +831,8 @@ public class TestPartitionedMobCompactor {
       if (sameStartKey) {
         // When creating multiple files under one partition, suffix needs to be different.
         startRow = Bytes.toBytes(startKey);
-        mobSuffix = UUID.randomUUID().toString().replaceAll("-", "");
-        delSuffix = UUID.randomUUID().toString().replaceAll("-", "") + "_del";
+        mobSuffix = TEST_UTIL.getRandomUUID().toString().replaceAll("-", "");
+        delSuffix = TEST_UTIL.getRandomUUID().toString().replaceAll("-", "") + "_del";
       } else {
         startRow = Bytes.toBytes(startKey + i);
       }
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestClusterId.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestClusterId.java
index 21e7ac19ceb..e0323a0fa1e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestClusterId.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestClusterId.java
@@ -20,7 +20,6 @@ package org.apache.hadoop.hbase.regionserver;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotNull;
 
-import java.util.UUID;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
@@ -103,7 +102,7 @@ public class TestClusterId {
     FSDataOutputStream s = null;
     try {
       s = fs.create(filePath);
-      s.writeUTF(UUID.randomUUID().toString());
+      s.writeUTF(TEST_UTIL.getRandomUUID().toString());
     } finally {
       if (s != null) {
         s.close();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
index 1ff6b273d96..d8a2ee869c0 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
@@ -52,7 +52,6 @@ import java.util.Map;
 import java.util.NavigableMap;
 import java.util.Objects;
 import java.util.TreeMap;
-import java.util.UUID;
 import java.util.concurrent.Callable;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.ExecutorService;
@@ -4694,7 +4693,7 @@ public class TestHRegion {
     // XXX: The spied AsyncFSWAL can not work properly because of a Mockito defect that can not
     // deal with classes which have a field of an inner class. See discussions in HBASE-15536.
     walConf.set(WALFactory.WAL_PROVIDER, "filesystem");
-    final WALFactory wals = new WALFactory(walConf, UUID.randomUUID().toString());
+    final WALFactory wals = new WALFactory(walConf, TEST_UTIL.getRandomUUID().toString());
     final WAL wal = spy(wals.getWAL(RegionInfoBuilder.newBuilder(tableName).build()));
     this.region = initHRegion(tableName, HConstants.EMPTY_START_ROW,
         HConstants.EMPTY_END_ROW, false, tableDurability, wal,
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java
index 89c39beacd9..305e753284d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java
@@ -41,7 +41,6 @@ import java.util.List;
 import java.util.Map;
 import java.util.Objects;
 import java.util.Random;
-import java.util.UUID;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.Path;
@@ -1643,7 +1642,7 @@ public class TestHRegionReplayEvents {
       byte[] valueBytes) throws IOException {
     HFile.WriterFactory hFileFactory = HFile.getWriterFactoryNoCache(TEST_UTIL.getConfiguration());
     // TODO We need a way to do this without creating files
-    Path testFile = new Path(testPath, UUID.randomUUID().toString());
+    Path testFile = new Path(testPath, TEST_UTIL.getRandomUUID().toString());
     FSDataOutputStream out = TEST_UTIL.getTestFileSystem().create(testFile);
     try {
       hFileFactory.withOutputStream(out);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestFSWAL.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestFSWAL.java
index 5336963dba7..0929e318581 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestFSWAL.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestFSWAL.java
@@ -408,7 +408,7 @@ public abstract class AbstractTestFSWAL {
       }
       // Add any old cluster id.
       List<UUID> clusterIds = new ArrayList<>(1);
-      clusterIds.add(UUID.randomUUID());
+      clusterIds.add(TEST_UTIL.getRandomUUID());
       // Now make appends run slow.
       goslow.set(true);
       for (int i = 0; i < countPerFamily; i++) {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/SerialReplicationTestBase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/SerialReplicationTestBase.java
index 259914e91c4..1b985187289 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/SerialReplicationTestBase.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/SerialReplicationTestBase.java
@@ -72,7 +72,7 @@ public class SerialReplicationTestBase {
 
   public static final class LocalReplicationEndpoint extends BaseReplicationEndpoint {
 
-    private static final UUID PEER_UUID = UUID.randomUUID();
+    private static final UUID PEER_UUID = UTIL.getRandomUUID();
 
     @Override
     public UUID getPeerUUID() {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.java
index aeed99b5288..5d833cc5a98 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.java
@@ -401,7 +401,7 @@ public class TestReplicationEndpoint extends TestReplicationBase {
   }
 
   public static class ReplicationEndpointForTest extends BaseReplicationEndpoint {
-    static UUID uuid = UUID.randomUUID();
+    static UUID uuid = utility1.getRandomUUID();
     static AtomicInteger contructedCount = new AtomicInteger();
     static AtomicInteger startedCount = new AtomicInteger();
     static AtomicInteger stoppedCount = new AtomicInteger();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestRaceWhenCreatingReplicationSource.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestRaceWhenCreatingReplicationSource.java
index 3ef9215792f..bd800a841f8 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestRaceWhenCreatingReplicationSource.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestRaceWhenCreatingReplicationSource.java
@@ -82,7 +82,7 @@ public class TestRaceWhenCreatingReplicationSource {
 
   public static final class LocalReplicationEndpoint extends BaseReplicationEndpoint {
 
-    private static final UUID PEER_UUID = UUID.randomUUID();
+    private static final UUID PEER_UUID = UTIL.getRandomUUID();
 
     @Override
     public UUID getPeerUUID() {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessControlFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessControlFilter.java
index e6f07f1dc59..3dc915b6f1f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessControlFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessControlFilter.java
@@ -24,7 +24,6 @@ import static org.junit.Assert.assertTrue;
 import java.security.PrivilegedExceptionAction;
 import java.util.ArrayList;
 import java.util.List;
-import java.util.UUID;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -132,7 +131,7 @@ public class TestAccessControlFilter extends SecureTestUtil {
       public Object run() throws Exception {
         Configuration conf = new Configuration(TEST_UTIL.getConfiguration());
         // force a new RS connection
-        conf.set("testkey", UUID.randomUUID().toString());
+        conf.set("testkey", TEST_UTIL.getRandomUUID().toString());
         Connection connection = ConnectionFactory.createConnection(conf);
         Table t = connection.getTable(TABLE);
         try {
@@ -161,7 +160,7 @@ public class TestAccessControlFilter extends SecureTestUtil {
       public Object run() throws Exception {
         Configuration conf = new Configuration(TEST_UTIL.getConfiguration());
         // force a new RS connection
-        conf.set("testkey", UUID.randomUUID().toString());
+        conf.set("testkey", TEST_UTIL.getRandomUUID().toString());
         Connection connection = ConnectionFactory.createConnection(conf);
         Table t = connection.getTable(TABLE);
         try {
@@ -189,7 +188,7 @@ public class TestAccessControlFilter extends SecureTestUtil {
       public Object run() throws Exception {
         Configuration conf = new Configuration(TEST_UTIL.getConfiguration());
         // force a new RS connection
-        conf.set("testkey", UUID.randomUUID().toString());
+        conf.set("testkey", TEST_UTIL.getRandomUUID().toString());
         Connection connection = ConnectionFactory.createConnection(conf);
         Table t = connection.getTable(TABLE);
         try {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestScanEarlyTermination.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestScanEarlyTermination.java
index 96d6b994ffa..9284cc9b3c9 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestScanEarlyTermination.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestScanEarlyTermination.java
@@ -21,7 +21,6 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 
-import java.util.UUID;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Coprocessor;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
@@ -157,7 +156,7 @@ public class TestScanEarlyTermination extends SecureTestUtil {
       @Override
       public Object run() throws Exception {
         // force a new RS connection
-        conf.set("testkey", UUID.randomUUID().toString());
+        conf.set("testkey", TEST_UTIL.getRandomUUID().toString());
         Connection connection = ConnectionFactory.createConnection(conf);
         Table t = connection.getTable(TEST_TABLE.getTableName());
         try {
@@ -184,7 +183,7 @@ public class TestScanEarlyTermination extends SecureTestUtil {
       @Override
      public Object run() throws Exception {
         // force a new RS connection
-        conf.set("testkey", UUID.randomUUID().toString());
+        conf.set("testkey", TEST_UTIL.getRandomUUID().toString());
         Connection connection = ConnectionFactory.createConnection(conf);
         Table t = connection.getTable(TEST_TABLE.getTableName());
         try {
@@ -210,7 +209,7 @@ public class TestScanEarlyTermination extends SecureTestUtil {
       @Override
       public Object run() throws Exception {
         // force a new RS connection
-        conf.set("testkey", UUID.randomUUID().toString());
+        conf.set("testkey", TEST_UTIL.getRandomUUID().toString());
         Connection connection = ConnectionFactory.createConnection(conf);
         Table t = connection.getTable(TEST_TABLE.getTableName());
         try {
@@ -234,7 +233,7 @@ public class TestScanEarlyTermination extends SecureTestUtil {
       @Override
       public Object run() throws Exception {
         // force a new RS connection
-        conf.set("testkey", UUID.randomUUID().toString());
+        conf.set("testkey", TEST_UTIL.getRandomUUID().toString());
         Connection connection = ConnectionFactory.createConnection(conf);
         Table t = connection.getTable(TEST_TABLE.getTableName());
         try {
@@ -262,7 +261,7 @@ public class TestScanEarlyTermination extends SecureTestUtil {
       @Override
       public Object run() throws Exception {
         // force a new RS connection
-        conf.set("testkey", UUID.randomUUID().toString());
+        conf.set("testkey", TEST_UTIL.getRandomUUID().toString());
         Connection connection = ConnectionFactory.createConnection(conf);
         Table t = connection.getTable(TEST_TABLE.getTableName());
         try {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/BaseTestHBaseFsck.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/BaseTestHBaseFsck.java
index 87d718e1393..fc23d51e2a4 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/BaseTestHBaseFsck.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/BaseTestHBaseFsck.java
@@ -29,7 +29,6 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Optional;
-import java.util.UUID;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.ScheduledThreadPoolExecutor;
@@ -415,7 +414,8 @@ public class BaseTestHBaseFsck {
     MobFileName mobFileName = MobFileName.create(oldFileName);
     String startKey = mobFileName.getStartKey();
     String date = mobFileName.getDate();
-    return MobFileName.create(startKey, date, UUID.randomUUID().toString().replaceAll("-", ""))
+    return MobFileName.create(startKey, date,
+        TEST_UTIL.getRandomUUID().toString().replaceAll("-", ""))
         .getFileName();
   }
 
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSUtils.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSUtils.java
index a862c8c4506..d5c920d50dc 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSUtils.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSUtils.java
@@ -28,7 +28,6 @@ import java.io.File;
 import java.io.IOException;
 import java.util.List;
 import java.util.Random;
-import java.util.UUID;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
@@ -249,7 +248,7 @@ public class TestFSUtils {
     assertEquals(new FsPermission("700"), filePerm);
 
     // then that the correct file is created
-    Path p = new Path("target" + File.separator + UUID.randomUUID().toString());
+    Path p = new Path("target" + File.separator + htu.getRandomUUID().toString());
     try {
       FSDataOutputStream out = FSUtils.create(conf, fs, p, filePerm, null);
       out.close();
@@ -268,7 +267,7 @@ public class TestFSUtils {
     conf.setBoolean(HConstants.ENABLE_DATA_FILE_UMASK, true);
     FsPermission perms = FSUtils.getFilePermissions(fs, conf, HConstants.DATA_FILE_UMASK_KEY);
     // then that the correct file is created
-    String file = UUID.randomUUID().toString();
+    String file = htu.getRandomUUID().toString();
     Path p = new Path(htu.getDataTestDir(), "temptarget" + File.separator + file);
     Path p1 = new Path(htu.getDataTestDir(), "temppath" + File.separator + file);
     try {
@@ -309,7 +308,7 @@ public class TestFSUtils {
     FileSystem fs = FileSystem.get(conf);
     Path testDir = htu.getDataTestDirOnTestFS("testArchiveFile");
 
-    String file = UUID.randomUUID().toString();
+    String file = htu.getRandomUUID().toString();
     Path p = new Path(testDir, file);
 
     FSDataOutputStream out = fs.create(p);
@@ -323,7 +322,7 @@ public class TestFSUtils {
     mockEnv.setValue(expect);
     EnvironmentEdgeManager.injectEdge(mockEnv);
     try {
-      String dstFile = UUID.randomUUID().toString();
+      String dstFile = htu.getRandomUUID().toString();
       Path dst = new Path(testDir , dstFile);
 
       assertTrue(FSUtils.renameAndSetModifyTime(fs, p, dst));
@@ -369,7 +368,7 @@ public class TestFSUtils {
     FSUtils.setStoragePolicy(fs, conf, testDir, HConstants.WAL_STORAGE_POLICY,
       HConstants.DEFAULT_WAL_STORAGE_POLICY);
 
-    String file = UUID.randomUUID().toString();
+    String file = htu.getRandomUUID().toString();
     Path p = new Path(testDir, file);
     WriteDataToHDFS(fs, p, 4096);
     // will assert existance before deleting.
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSVisitor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSVisitor.java
index aa2a7453755..44b2df9769b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSVisitor.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSVisitor.java
@@ -22,7 +22,6 @@ import static org.junit.Assert.assertEquals;
 import java.io.IOException;
 import java.util.HashSet;
 import java.util.Set;
-import java.util.UUID;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
@@ -118,9 +117,9 @@ public class TestFSVisitor {
       Path familyDir = new Path(regionDir, familyName);
       fs.mkdirs(familyDir);
       for (int h = 0; h < 5; ++h) {
-        String hfileName = UUID.randomUUID().toString().replaceAll("-", "");
-        tableHFiles.add(hfileName);
-        fs.createNewFile(new Path(familyDir, hfileName));
+        String hfileName = TEST_UTIL.getRandomUUID().toString().replaceAll("-", "");
+        tableHFiles.add(hfileName);
+        fs.createNewFile(new Path(familyDir, hfileName));
       }
     }
   }
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSplitCalculator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSplitCalculator.java
index ece83bc4d43..2ecc70db769 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSplitCalculator.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSplitCalculator.java
@@ -27,6 +27,7 @@ import java.util.List;
 import java.util.SortedSet;
 import java.util.UUID;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.testclassification.MiscTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.junit.ClassRule;
@@ -46,7 +47,7 @@ public class TestRegionSplitCalculator {
       HBaseClassTestRule.forClass(TestRegionSplitCalculator.class);
 
   private static final Logger LOG = LoggerFactory.getLogger(TestRegionSplitCalculator.class);
-
+  public static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
   /**
    * This is range uses a user specified start and end keys. It also has an
    * extra tiebreaker so that different ranges with the same start/end key pair
@@ -59,7 +60,7 @@ public class TestRegionSplitCalculator {
     SimpleRange(byte[] start, byte[] end) {
       this.start = start;
       this.end = end;
-      this.tiebreaker = UUID.randomUUID();
+      this.tiebreaker = TEST_UTIL.getRandomUUID();
     }
 
     @Override
diff --git a/hbase-zookeeper/src/test/java/org/apache/hadoop/hbase/HBaseZKTestingUtility.java b/hbase-zookeeper/src/test/java/org/apache/hadoop/hbase/HBaseZKTestingUtility.java
index b755c32c44e..527ddab2595 100644
--- a/hbase-zookeeper/src/test/java/org/apache/hadoop/hbase/HBaseZKTestingUtility.java
+++ b/hbase-zookeeper/src/test/java/org/apache/hadoop/hbase/HBaseZKTestingUtility.java
@@ -19,7 +19,6 @@ package org.apache.hadoop.hbase;
 
 import java.io.File;
 import java.io.IOException;
-import java.util.UUID;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
@@ -76,7 +75,7 @@ public class HBaseZKTestingUtility extends HBaseCommonTestingUtility {
 
     // Using randomUUID ensures that multiple clusters can be launched by
     // a same test, if it stops & starts them
-    Path testDir = getDataTestDir("cluster_" + UUID.randomUUID().toString());
+    Path testDir = getDataTestDir("cluster_" + getRandomUUID().toString());
     clusterTestDir = new File(testDir.toString()).getAbsoluteFile();
     // Have it cleaned up on exit
     boolean b = deleteOnExit();
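
Note (not part of the patch): a minimal sketch of the idea behind the new getRandomUUID() helper that this patch threads through the test code; the class name RandomUUIDDemo below is hypothetical. UUID.randomUUID() is backed by a single shared SecureRandom, so test suites that generate many names can contend on its synchronized methods and pay for cryptographic-strength entropy they do not need; building the UUID from two ThreadLocalRandom longs avoids that cost. The trade-off is that the result is not a spec-compliant version 4 UUID (the version and variant bits are left random), which is acceptable for unique-ish test names and directories but not where real UUID semantics or unpredictability matter.

import java.util.UUID;
import java.util.concurrent.ThreadLocalRandom;

public class RandomUUIDDemo {
  // Cheap, non-cryptographic UUID in the style of HBaseCommonTestingUtility#getRandomUUID.
  static UUID fastRandomUUID() {
    ThreadLocalRandom rnd = ThreadLocalRandom.current();
    return new UUID(rnd.nextLong(), rnd.nextLong());
  }

  public static void main(String[] args) {
    System.out.println(UUID.randomUUID()); // SecureRandom-backed, always version 4
    System.out.println(fastRandomUUID());  // ThreadLocalRandom-backed, version bits arbitrary
  }
}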