HBASE-19164: Remove UUID.randomUUID in tests.

Signed-off-by: Mike Drob <mdrob@apache.org>
Author: Sahil Aggarwal, 2018-06-27 14:04:59 +05:30 (committed by Mike Drob)
parent a02581e72f
commit e61507b9a0
33 changed files with 79 additions and 88 deletions


@@ -23,6 +23,7 @@ import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.UUID;
+import java.util.concurrent.ThreadLocalRandom;
import org.apache.commons.io.FileUtils;
import org.apache.hadoop.conf.Configuration;
@@ -144,9 +145,15 @@ public class HBaseCommonTestingUtility {
   * @see #getBaseTestDir()
   */
  public Path getRandomDir() {
-    return new Path(getBaseTestDir(), UUID.randomUUID().toString());
+    return new Path(getBaseTestDir(), getRandomUUID().toString());
  }

+  public UUID getRandomUUID() {
+    return new UUID(ThreadLocalRandom.current().nextLong(),
+      ThreadLocalRandom.current().nextLong());
+  }
+
  protected void createSubDir(String propertyName, Path parent, String subDirName) {
    Path newPath = new Path(parent, subDirName);
    File newDir = new File(newPath.toString()).getAbsoluteFile();
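Aside (not part of the commit): the new helper trades the SecureRandom-backed UUID.randomUUID() for two ThreadLocalRandom longs used verbatim. That makes generation cheap and immune to entropy-pool stalls, but the result carries no version/variant bits and no cryptographic guarantee, which is acceptable for naming test directories and tables. A minimal standalone sketch of the difference (class name hypothetical):

import java.util.UUID;
import java.util.concurrent.ThreadLocalRandom;

public class RandomUuidComparison {
  public static void main(String[] args) {
    // SecureRandom-backed; version-4 and IETF-variant bits are stamped in.
    UUID secure = UUID.randomUUID();
    System.out.println(secure + " version=" + secure.version()); // always 4

    // The test-utility replacement: two raw longs, so version() is arbitrary.
    UUID cheap = new UUID(ThreadLocalRandom.current().nextLong(),
        ThreadLocalRandom.current().nextLong());
    System.out.println(cheap + " version=" + cheap.version());
  }
}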


@@ -26,7 +26,6 @@ import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
-import java.util.UUID;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseClassTestRule;
@@ -85,7 +84,7 @@ public class TestReplicationSyncUpToolWithBulkLoadedData extends TestReplication
    Iterator<String> randomHFileRangeListIterator = null;
    Set<String> randomHFileRanges = new HashSet<>(16);
    for (int i = 0; i < 16; i++) {
-      randomHFileRanges.add(UUID.randomUUID().toString());
+      randomHFileRanges.add(utility1.getRandomUUID().toString());
    }
    List<String> randomHFileRangeList = new ArrayList<>(randomHFileRanges);
    Collections.sort(randomHFileRangeList);


@@ -20,13 +20,15 @@ package org.apache.hadoop.hbase;

import java.util.UUID;
+import java.util.concurrent.ThreadLocalRandom;

public class RandomStringGeneratorImpl implements RandomStringGenerator {

  private final String s;

  public RandomStringGeneratorImpl() {
-    s = UUID.randomUUID().toString();
+    s = new UUID(ThreadLocalRandom.current().nextLong(),
+      ThreadLocalRandom.current().nextLong()).toString();
  }

  @Override


@@ -264,7 +264,7 @@ public class IntegrationTestReplication extends IntegrationTestBigLinkedList {
   */
  protected void runGenerator() throws Exception {
    Path outputPath = new Path(outputDir);
-    UUID uuid = UUID.randomUUID(); //create a random UUID.
+    UUID uuid = util.getRandomUUID(); //create a random UUID.
    Path generatorOutput = new Path(outputPath, uuid.toString());

    Generator generator = new Generator();
@@ -288,7 +288,7 @@ public class IntegrationTestReplication extends IntegrationTestBigLinkedList {
   */
  protected void runVerify(long expectedNumNodes) throws Exception {
    Path outputPath = new Path(outputDir);
-    UUID uuid = UUID.randomUUID(); //create a random UUID.
+    UUID uuid = util.getRandomUUID(); //create a random UUID.
    Path iterationOutput = new Path(outputPath, uuid.toString());

    Verify verify = new Verify();


@@ -25,7 +25,6 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Optional;
-import java.util.UUID;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
@@ -123,7 +122,7 @@ public class TestImportTSVWithOperationAttributes implements Configurable {

  @Test
  public void testMROnTable() throws Exception {
-    final TableName tableName = TableName.valueOf(name.getMethodName() + UUID.randomUUID());
+    final TableName tableName = TableName.valueOf(name.getMethodName() + util.getRandomUUID());

    // Prepare the arguments required for the test.
    String[] args = new String[] {
@@ -139,7 +138,7 @@ public class TestImportTSVWithOperationAttributes implements Configurable {

  @Test
  public void testMROnTableWithInvalidOperationAttr() throws Exception {
-    final TableName tableName = TableName.valueOf(name.getMethodName() + UUID.randomUUID());
+    final TableName tableName = TableName.valueOf(name.getMethodName() + util.getRandomUUID());

    // Prepare the arguments required for the test.
    String[] args = new String[] {


@@ -24,7 +24,6 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Optional;
-import java.util.UUID;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
@@ -111,7 +110,7 @@ public class TestImportTSVWithTTLs implements Configurable {

  @Test
  public void testMROnTable() throws Exception {
-    final TableName tableName = TableName.valueOf(name.getMethodName() + UUID.randomUUID());
+    final TableName tableName = TableName.valueOf(name.getMethodName() + util.getRandomUUID());

    // Prepare the arguments required for the test.
    String[] args = new String[] {


@@ -27,7 +27,6 @@ import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
-import java.util.UUID;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
@@ -161,7 +160,7 @@ public class TestImportTSVWithVisibilityLabels implements Configurable {

  @Test
  public void testMROnTable() throws Exception {
-    final TableName tableName = TableName.valueOf(name.getMethodName() + UUID.randomUUID());
+    final TableName tableName = TableName.valueOf(name.getMethodName() + util.getRandomUUID());

    // Prepare the arguments required for the test.
    String[] args = new String[] {
@@ -177,7 +176,7 @@ public class TestImportTSVWithVisibilityLabels implements Configurable {

  @Test
  public void testMROnTableWithDeletes() throws Exception {
-    final TableName tableName = TableName.valueOf(name.getMethodName() + UUID.randomUUID());
+    final TableName tableName = TableName.valueOf(name.getMethodName() + util.getRandomUUID());

    // Prepare the arguments required for the test.
    String[] args = new String[] {
@@ -229,7 +228,7 @@ public class TestImportTSVWithVisibilityLabels implements Configurable {

  @Test
  public void testMROnTableWithBulkload() throws Exception {
-    final TableName tableName = TableName.valueOf(name.getMethodName() + UUID.randomUUID());
+    final TableName tableName = TableName.valueOf(name.getMethodName() + util.getRandomUUID());
    Path hfiles = new Path(util.getDataTestDirOnTestFS(tableName.getNameAsString()), "hfiles");
    // Prepare the arguments required for the test.
    String[] args = new String[] {
@@ -245,7 +244,7 @@ public class TestImportTSVWithVisibilityLabels implements Configurable {

  @Test
  public void testBulkOutputWithTsvImporterTextMapper() throws Exception {
-    final TableName table = TableName.valueOf(name.getMethodName() + UUID.randomUUID());
+    final TableName table = TableName.valueOf(name.getMethodName() + util.getRandomUUID());
    String FAMILY = "FAM";
    Path bulkOutputPath = new Path(util.getDataTestDirOnTestFS(table.getNameAsString()),"hfiles");
    // Prepare the arguments required for the test.
@@ -266,7 +265,7 @@ public class TestImportTSVWithVisibilityLabels implements Configurable {

  @Test
  public void testMRWithOutputFormat() throws Exception {
-    final TableName tableName = TableName.valueOf(name.getMethodName() + UUID.randomUUID());
+    final TableName tableName = TableName.valueOf(name.getMethodName() + util.getRandomUUID());
    Path hfiles = new Path(util.getDataTestDirOnTestFS(tableName.getNameAsString()), "hfiles");
    // Prepare the arguments required for the test.
    String[] args = new String[] {
@@ -283,7 +282,7 @@ public class TestImportTSVWithVisibilityLabels implements Configurable {

  @Test
  public void testBulkOutputWithInvalidLabels() throws Exception {
-    final TableName tableName = TableName.valueOf(name.getMethodName() + UUID.randomUUID());
+    final TableName tableName = TableName.valueOf(name.getMethodName() + util.getRandomUUID());
    Path hfiles = new Path(util.getDataTestDirOnTestFS(tableName.getNameAsString()), "hfiles");
    // Prepare the arguments required for the test.
    String[] args =
@@ -301,7 +300,7 @@ public class TestImportTSVWithVisibilityLabels implements Configurable {

  @Test
  public void testBulkOutputWithTsvImporterTextMapperWithInvalidLabels() throws Exception {
-    final TableName tableName = TableName.valueOf(name.getMethodName() + UUID.randomUUID());
+    final TableName tableName = TableName.valueOf(name.getMethodName() + util.getRandomUUID());
    Path hfiles = new Path(util.getDataTestDirOnTestFS(tableName.getNameAsString()), "hfiles");
    // Prepare the arguments required for the test.
    String[] args =


@@ -29,7 +29,6 @@ import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
-import java.util.UUID;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
@@ -118,7 +117,7 @@ public class TestImportTsv implements Configurable {

  @Before
  public void setup() throws Exception {
-    tn = TableName.valueOf("test-" + UUID.randomUUID());
+    tn = TableName.valueOf("test-" + util.getRandomUUID());
    args = new HashMap<>();
    // Prepare the arguments required for the test.
    args.put(ImportTsv.COLUMNS_CONF_KEY, "HBASE_ROW_KEY,FAM:A,FAM:B");


@@ -49,7 +49,6 @@ import java.util.Properties;
import java.util.Random;
import java.util.Set;
import java.util.TreeSet;
-import java.util.UUID;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Collectors;
@@ -551,7 +550,7 @@ public class HBaseTestingUtility extends HBaseZKTestingUtility {
    //the working directory, and create a unique sub dir there
    FileSystem fs = getTestFileSystem();
    Path newDataTestDir;
-    String randomStr = UUID.randomUUID().toString();
+    String randomStr = getRandomUUID().toString();
    if (fs.getUri().getScheme().equals(FileSystem.getLocal(conf).getUri().getScheme())) {
      newDataTestDir = new Path(getDataTestDir(), randomStr);
      File dataTestDir = new File(newDataTestDir.toString());


@@ -20,7 +20,6 @@ package org.apache.hadoop.hbase;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

-import java.util.UUID;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.client.Put;
@@ -68,7 +67,7 @@ public class TestHBaseOnOtherDfsCluster {
    targetFs = FileSystem.get(util2.getConfiguration());
    assertFsSameUri(fs, targetFs);

-    Path randomFile = new Path("/"+UUID.randomUUID());
+    Path randomFile = new Path("/"+util1.getRandomUUID());
    assertTrue(targetFs.createNewFile(randomFile));
    assertTrue(fs.exists(randomFile));


@@ -25,7 +25,6 @@ import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.PrintWriter;
-import java.util.UUID;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -144,7 +143,7 @@ public class TestNodeHealthCheckChore {
        throw new IOException("Failed mkdirs " + tempDir);
      }
    }
-    String scriptName = "HealthScript" + UUID.randomUUID().toString()
+    String scriptName = "HealthScript" + UTIL.getRandomUUID().toString()
        + (Shell.WINDOWS ? ".cmd" : ".sh");
    healthScriptFile = new File(tempDir.getAbsolutePath(), scriptName);
    conf.set(HConstants.HEALTH_SCRIPT_LOC, healthScriptFile.getAbsolutePath());


@@ -38,7 +38,6 @@ import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.NavigableMap;
-import java.util.UUID;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
@@ -357,9 +356,9 @@ public class TestFromClientSide {
    Table ht = TEST_UTIL.createTable(tableName, FAMILIES);
    String value = "this is the value";
    String value2 = "this is some other value";
-    String keyPrefix1 = UUID.randomUUID().toString();
-    String keyPrefix2 = UUID.randomUUID().toString();
-    String keyPrefix3 = UUID.randomUUID().toString();
+    String keyPrefix1 = TEST_UTIL.getRandomUUID().toString();
+    String keyPrefix2 = TEST_UTIL.getRandomUUID().toString();
+    String keyPrefix3 = TEST_UTIL.getRandomUUID().toString();
    putRows(ht, 3, value, keyPrefix1);
    putRows(ht, 3, value, keyPrefix2);
    putRows(ht, 3, value, keyPrefix3);
@@ -449,7 +448,7 @@ public class TestFromClientSide {
  private void putRows(Table ht, int numRows, String value, String key)
      throws IOException {
    for (int i = 0; i < numRows; i++) {
-      String row = key + "_" + UUID.randomUUID().toString();
+      String row = key + "_" + TEST_UTIL.getRandomUUID().toString();
      System.out.println(String.format("Saving row: %s, with value %s", row,
        value));
      Put put = new Put(Bytes.toBytes(row));


@@ -18,7 +18,6 @@
package org.apache.hadoop.hbase.client;

import java.io.IOException;
-import java.util.UUID;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Coprocessor;
import org.apache.hadoop.hbase.HBaseClassTestRule;
@@ -52,7 +51,7 @@ public class TestSnapshotWithAcl extends SecureTestUtil {
  public static final HBaseClassTestRule CLASS_RULE =
      HBaseClassTestRule.forClass(TestSnapshotWithAcl.class);

-  public TableName TEST_TABLE = TableName.valueOf(UUID.randomUUID().toString());
+  public TableName TEST_TABLE = TableName.valueOf(TEST_UTIL.getRandomUUID().toString());

  private static final int ROW_COUNT = 30000;
@@ -197,11 +196,11 @@ public class TestSnapshotWithAcl extends SecureTestUtil {
    loadData();
    verifyRows(TEST_TABLE);

-    String snapshotName1 = UUID.randomUUID().toString();
+    String snapshotName1 = TEST_UTIL.getRandomUUID().toString();
    admin.snapshot(snapshotName1, TEST_TABLE);

    // clone snapshot with restoreAcl true.
-    TableName tableName1 = TableName.valueOf(UUID.randomUUID().toString());
+    TableName tableName1 = TableName.valueOf(TEST_UTIL.getRandomUUID().toString());
    admin.cloneSnapshot(snapshotName1, tableName1, true);
    verifyRows(tableName1);
    verifyAllowed(new AccessReadAction(tableName1), USER_OWNER, USER_RO, USER_RW);
@@ -210,7 +209,7 @@ public class TestSnapshotWithAcl extends SecureTestUtil {
    verifyDenied(new AccessWriteAction(tableName1), USER_RO, USER_NONE);

    // clone snapshot with restoreAcl false.
-    TableName tableName2 = TableName.valueOf(UUID.randomUUID().toString());
+    TableName tableName2 = TableName.valueOf(TEST_UTIL.getRandomUUID().toString());
    admin.cloneSnapshot(snapshotName1, tableName2, false);
    verifyRows(tableName2);
    verifyAllowed(new AccessReadAction(tableName2), USER_OWNER);


@@ -299,7 +299,7 @@ public class TestReplicationAdminWithClusters extends TestReplicationBase {

      @Override
      public UUID getPeerUUID() {
-        return UUID.randomUUID();
+        return utility1.getRandomUUID();
      }

      @Override


@@ -28,7 +28,6 @@ import java.io.DataOutputStream;
import java.io.IOException;
import java.security.SecureRandom;
import java.util.List;
-import java.util.UUID;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
@@ -221,7 +220,8 @@ public class TestHFileEncryption {
          .build();
      // write a new test HFile
      LOG.info("Writing with " + fileContext);
-      Path path = new Path(TEST_UTIL.getDataTestDir(), UUID.randomUUID().toString() + ".hfile");
+      Path path = new Path(TEST_UTIL.getDataTestDir(),
+        TEST_UTIL.getRandomUUID().toString() + ".hfile");
      FSDataOutputStream out = fs.create(path);
      HFile.Writer writer = HFile.getWriterFactory(conf, cacheConf)
          .withOutputStream(out)


@@ -36,7 +36,6 @@ import static org.junit.Assert.assertTrue;

import java.io.IOException;
import java.util.Map;
-import java.util.UUID;
import java.util.concurrent.atomic.LongAdder;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
@@ -124,7 +123,7 @@ public class TestSplitLogManager {
    conf = TEST_UTIL.getConfiguration();
    // Use a different ZK wrapper instance for each tests.
    zkw =
-        new ZKWatcher(conf, "split-log-manager-tests" + UUID.randomUUID().toString(), null);
+        new ZKWatcher(conf, "split-log-manager-tests" + TEST_UTIL.getRandomUUID().toString(), null);
    master = new DummyMasterServices(zkw, conf);

    ZKUtil.deleteChildrenRecursively(zkw, zkw.getZNodePaths().baseZNode);
@@ -523,7 +522,7 @@ public class TestSplitLogManager {
    Path logDirPath = new Path(new Path(dir, HConstants.HREGION_LOGDIR_NAME), serverName);
    fs.mkdirs(logDirPath);
    // create an empty log file
-    String logFile = new Path(logDirPath, UUID.randomUUID().toString()).toString();
+    String logFile = new Path(logDirPath, TEST_UTIL.getRandomUUID().toString()).toString();
    fs.create(new Path(logDirPath, logFile)).close();

    // spin up a thread mocking split done.


@@ -29,7 +29,6 @@ import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
-import java.util.UUID;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -179,7 +178,7 @@ public class TestSnapshotFileCache {

    // add a random file to make sure we refresh
-    FileStatus randomFile = mockStoreFile(UUID.randomUUID().toString());
+    FileStatus randomFile = mockStoreFile(UTIL.getRandomUUID().toString());
    allStoreFiles.add(randomFile);
    deletableFiles = cache.getUnreferencedFiles(allStoreFiles, null);
    assertEquals(randomFile, Iterables.getOnlyElement(deletableFiles));


@@ -24,8 +24,8 @@ import static org.junit.Assert.assertTrue;

import java.util.Date;
import java.util.Random;
-import java.util.UUID;
import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.MD5Hash;
@@ -41,6 +41,8 @@ public class TestMobFileName {
  public static final HBaseClassTestRule CLASS_RULE =
      HBaseClassTestRule.forClass(TestMobFileName.class);

+  private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
+
  private String uuid;
  private Date date;
  private String dateStr;
@@ -49,7 +51,7 @@ public class TestMobFileName {
  @Before
  public void setUp() {
    Random random = new Random();
-    uuid = UUID.randomUUID().toString().replaceAll("-", "");
+    uuid = TEST_UTIL.getRandomUUID().toString().replaceAll("-", "");
    date = new Date();
    dateStr = MobUtils.formatDate(date);
    startKey = Bytes.toBytes(random.nextInt());
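Aside, illustrative only: TestMobFileName and the mob compaction/fsck tests later in the diff strip the dashes before using the UUID as a file-name suffix, so the canonical 8-4-4-4-12 rendering of the 128 bits collapses to exactly 32 hex characters, matching the dash-free suffixes these tests feed to MobFileName. A standalone sketch (class name hypothetical):

import java.util.UUID;
import java.util.concurrent.ThreadLocalRandom;

public class MobSuffixDemo {
  public static void main(String[] args) {
    // Same construction the updated tests use: 128 random bits rendered
    // as 8-4-4-4-12 hex, dashes stripped, leaving a 32-character suffix.
    String suffix = new UUID(ThreadLocalRandom.current().nextLong(),
        ThreadLocalRandom.current().nextLong()).toString().replaceAll("-", "");
    System.out.println(suffix + " length=" + suffix.length()); // length=32
  }
}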


@@ -31,7 +31,6 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;
-import java.util.UUID;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.RejectedExecutionHandler;
@@ -136,8 +135,8 @@ public class TestPartitionedMobCompactor {
    Path testDir = FSUtils.getRootDir(conf);
    Path mobTestDir = new Path(testDir, MobConstants.MOB_DIR_NAME);
    basePath = new Path(new Path(mobTestDir, tableName), family);
-    mobSuffix = UUID.randomUUID().toString().replaceAll("-", "");
-    delSuffix = UUID.randomUUID().toString().replaceAll("-", "") + "_del";
+    mobSuffix = TEST_UTIL.getRandomUUID().toString().replaceAll("-", "");
+    delSuffix = TEST_UTIL.getRandomUUID().toString().replaceAll("-", "") + "_del";
    allFiles.clear();
    mobFiles.clear();
    delFiles.clear();
@@ -832,8 +831,8 @@ public class TestPartitionedMobCompactor {
      if (sameStartKey) {
        // When creating multiple files under one partition, suffix needs to be different.
        startRow = Bytes.toBytes(startKey);
-        mobSuffix = UUID.randomUUID().toString().replaceAll("-", "");
-        delSuffix = UUID.randomUUID().toString().replaceAll("-", "") + "_del";
+        mobSuffix = TEST_UTIL.getRandomUUID().toString().replaceAll("-", "");
+        delSuffix = TEST_UTIL.getRandomUUID().toString().replaceAll("-", "") + "_del";
      } else {
        startRow = Bytes.toBytes(startKey + i);
      }


@@ -20,7 +20,6 @@ package org.apache.hadoop.hbase.regionserver;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;

-import java.util.UUID;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
@@ -103,7 +102,7 @@ public class TestClusterId {
    FSDataOutputStream s = null;
    try {
      s = fs.create(filePath);
-      s.writeUTF(UUID.randomUUID().toString());
+      s.writeUTF(TEST_UTIL.getRandomUUID().toString());
    } finally {
      if (s != null) {
        s.close();


@@ -52,7 +52,6 @@ import java.util.Map;
import java.util.NavigableMap;
import java.util.Objects;
import java.util.TreeMap;
-import java.util.UUID;
import java.util.concurrent.Callable;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
@@ -4694,7 +4693,7 @@ public class TestHRegion {
    // XXX: The spied AsyncFSWAL can not work properly because of a Mockito defect that can not
    // deal with classes which have a field of an inner class. See discussions in HBASE-15536.
    walConf.set(WALFactory.WAL_PROVIDER, "filesystem");
-    final WALFactory wals = new WALFactory(walConf, UUID.randomUUID().toString());
+    final WALFactory wals = new WALFactory(walConf, TEST_UTIL.getRandomUUID().toString());
    final WAL wal = spy(wals.getWAL(RegionInfoBuilder.newBuilder(tableName).build()));
    this.region = initHRegion(tableName, HConstants.EMPTY_START_ROW,
        HConstants.EMPTY_END_ROW, false, tableDurability, wal,


@@ -41,7 +41,6 @@ import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Random;
-import java.util.UUID;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.Path;
@@ -1643,7 +1642,7 @@ public class TestHRegionReplayEvents {
      byte[] valueBytes) throws IOException {
    HFile.WriterFactory hFileFactory = HFile.getWriterFactoryNoCache(TEST_UTIL.getConfiguration());
    // TODO We need a way to do this without creating files
-    Path testFile = new Path(testPath, UUID.randomUUID().toString());
+    Path testFile = new Path(testPath, TEST_UTIL.getRandomUUID().toString());
    FSDataOutputStream out = TEST_UTIL.getTestFileSystem().create(testFile);
    try {
      hFileFactory.withOutputStream(out);


@@ -408,7 +408,7 @@ public abstract class AbstractTestFSWAL {
      }
      // Add any old cluster id.
      List<UUID> clusterIds = new ArrayList<>(1);
-      clusterIds.add(UUID.randomUUID());
+      clusterIds.add(TEST_UTIL.getRandomUUID());
      // Now make appends run slow.
      goslow.set(true);
      for (int i = 0; i < countPerFamily; i++) {


@@ -72,7 +72,7 @@ public class SerialReplicationTestBase {

  public static final class LocalReplicationEndpoint extends BaseReplicationEndpoint {

-    private static final UUID PEER_UUID = UUID.randomUUID();
+    private static final UUID PEER_UUID = UTIL.getRandomUUID();

    @Override
    public UUID getPeerUUID() {


@@ -401,7 +401,7 @@ public class TestReplicationEndpoint extends TestReplicationBase {
  }

  public static class ReplicationEndpointForTest extends BaseReplicationEndpoint {

-    static UUID uuid = UUID.randomUUID();
+    static UUID uuid = utility1.getRandomUUID();
    static AtomicInteger contructedCount = new AtomicInteger();
    static AtomicInteger startedCount = new AtomicInteger();
    static AtomicInteger stoppedCount = new AtomicInteger();


@@ -82,7 +82,7 @@ public class TestRaceWhenCreatingReplicationSource {

  public static final class LocalReplicationEndpoint extends BaseReplicationEndpoint {

-    private static final UUID PEER_UUID = UUID.randomUUID();
+    private static final UUID PEER_UUID = UTIL.getRandomUUID();

    @Override
    public UUID getPeerUUID() {


@@ -24,7 +24,6 @@ import static org.junit.Assert.assertTrue;

import java.security.PrivilegedExceptionAction;
import java.util.ArrayList;
import java.util.List;
-import java.util.UUID;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -132,7 +131,7 @@ public class TestAccessControlFilter extends SecureTestUtil {
      public Object run() throws Exception {
        Configuration conf = new Configuration(TEST_UTIL.getConfiguration());
        // force a new RS connection
-        conf.set("testkey", UUID.randomUUID().toString());
+        conf.set("testkey", TEST_UTIL.getRandomUUID().toString());
        Connection connection = ConnectionFactory.createConnection(conf);
        Table t = connection.getTable(TABLE);
        try {
@@ -161,7 +160,7 @@ public class TestAccessControlFilter extends SecureTestUtil {
      public Object run() throws Exception {
        Configuration conf = new Configuration(TEST_UTIL.getConfiguration());
        // force a new RS connection
-        conf.set("testkey", UUID.randomUUID().toString());
+        conf.set("testkey", TEST_UTIL.getRandomUUID().toString());
        Connection connection = ConnectionFactory.createConnection(conf);
        Table t = connection.getTable(TABLE);
        try {
@@ -189,7 +188,7 @@ public class TestAccessControlFilter extends SecureTestUtil {
      public Object run() throws Exception {
        Configuration conf = new Configuration(TEST_UTIL.getConfiguration());
        // force a new RS connection
-        conf.set("testkey", UUID.randomUUID().toString());
+        conf.set("testkey", TEST_UTIL.getRandomUUID().toString());
        Connection connection = ConnectionFactory.createConnection(conf);
        Table t = connection.getTable(TABLE);
        try {


@@ -21,7 +21,6 @@ import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;

-import java.util.UUID;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Coprocessor;
import org.apache.hadoop.hbase.HBaseClassTestRule;
@@ -157,7 +156,7 @@ public class TestScanEarlyTermination extends SecureTestUtil {
      @Override
      public Object run() throws Exception {
        // force a new RS connection
-        conf.set("testkey", UUID.randomUUID().toString());
+        conf.set("testkey", TEST_UTIL.getRandomUUID().toString());
        Connection connection = ConnectionFactory.createConnection(conf);
        Table t = connection.getTable(TEST_TABLE.getTableName());
        try {
@@ -184,7 +183,7 @@ public class TestScanEarlyTermination extends SecureTestUtil {
      @Override
      public Object run() throws Exception {
        // force a new RS connection
-        conf.set("testkey", UUID.randomUUID().toString());
+        conf.set("testkey", TEST_UTIL.getRandomUUID().toString());
        Connection connection = ConnectionFactory.createConnection(conf);
        Table t = connection.getTable(TEST_TABLE.getTableName());
        try {
@@ -210,7 +209,7 @@ public class TestScanEarlyTermination extends SecureTestUtil {
      @Override
      public Object run() throws Exception {
        // force a new RS connection
-        conf.set("testkey", UUID.randomUUID().toString());
+        conf.set("testkey", TEST_UTIL.getRandomUUID().toString());
        Connection connection = ConnectionFactory.createConnection(conf);
        Table t = connection.getTable(TEST_TABLE.getTableName());
        try {
@@ -234,7 +233,7 @@ public class TestScanEarlyTermination extends SecureTestUtil {
      @Override
      public Object run() throws Exception {
        // force a new RS connection
-        conf.set("testkey", UUID.randomUUID().toString());
+        conf.set("testkey", TEST_UTIL.getRandomUUID().toString());
        Connection connection = ConnectionFactory.createConnection(conf);
        Table t = connection.getTable(TEST_TABLE.getTableName());
        try {
@@ -262,7 +261,7 @@ public class TestScanEarlyTermination extends SecureTestUtil {
      @Override
      public Object run() throws Exception {
        // force a new RS connection
-        conf.set("testkey", UUID.randomUUID().toString());
+        conf.set("testkey", TEST_UTIL.getRandomUUID().toString());
        Connection connection = ConnectionFactory.createConnection(conf);
        Table t = connection.getTable(TEST_TABLE.getTableName());
        try {


@@ -29,7 +29,6 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
-import java.util.UUID;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.ScheduledThreadPoolExecutor;
@@ -415,7 +414,8 @@ public class BaseTestHBaseFsck {
    MobFileName mobFileName = MobFileName.create(oldFileName);
    String startKey = mobFileName.getStartKey();
    String date = mobFileName.getDate();
-    return MobFileName.create(startKey, date, UUID.randomUUID().toString().replaceAll("-", ""))
+    return MobFileName.create(startKey, date,
+      TEST_UTIL.getRandomUUID().toString().replaceAll("-", ""))
        .getFileName();
  }


@@ -28,7 +28,6 @@ import java.io.File;
import java.io.IOException;
import java.util.List;
import java.util.Random;
-import java.util.UUID;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
@@ -249,7 +248,7 @@ public class TestFSUtils {
    assertEquals(new FsPermission("700"), filePerm);

    // then that the correct file is created
-    Path p = new Path("target" + File.separator + UUID.randomUUID().toString());
+    Path p = new Path("target" + File.separator + htu.getRandomUUID().toString());
    try {
      FSDataOutputStream out = FSUtils.create(conf, fs, p, filePerm, null);
      out.close();
@@ -268,7 +267,7 @@ public class TestFSUtils {
    conf.setBoolean(HConstants.ENABLE_DATA_FILE_UMASK, true);
    FsPermission perms = FSUtils.getFilePermissions(fs, conf, HConstants.DATA_FILE_UMASK_KEY);
    // then that the correct file is created
-    String file = UUID.randomUUID().toString();
+    String file = htu.getRandomUUID().toString();
    Path p = new Path(htu.getDataTestDir(), "temptarget" + File.separator + file);
    Path p1 = new Path(htu.getDataTestDir(), "temppath" + File.separator + file);
    try {
@@ -309,7 +308,7 @@ public class TestFSUtils {
    FileSystem fs = FileSystem.get(conf);
    Path testDir = htu.getDataTestDirOnTestFS("testArchiveFile");

-    String file = UUID.randomUUID().toString();
+    String file = htu.getRandomUUID().toString();
    Path p = new Path(testDir, file);

    FSDataOutputStream out = fs.create(p);
@@ -323,7 +322,7 @@ public class TestFSUtils {
      mockEnv.setValue(expect);
      EnvironmentEdgeManager.injectEdge(mockEnv);
      try {
-        String dstFile = UUID.randomUUID().toString();
+        String dstFile = htu.getRandomUUID().toString();
        Path dst = new Path(testDir , dstFile);

        assertTrue(FSUtils.renameAndSetModifyTime(fs, p, dst));
@@ -369,7 +368,7 @@ public class TestFSUtils {
      FSUtils.setStoragePolicy(fs, conf, testDir, HConstants.WAL_STORAGE_POLICY,
          HConstants.DEFAULT_WAL_STORAGE_POLICY);

-      String file = UUID.randomUUID().toString();
+      String file = htu.getRandomUUID().toString();
      Path p = new Path(testDir, file);
      WriteDataToHDFS(fs, p, 4096);
      // will assert existance before deleting.


@@ -22,7 +22,6 @@ import static org.junit.Assert.assertEquals;

import java.io.IOException;
import java.util.HashSet;
import java.util.Set;
-import java.util.UUID;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseClassTestRule;
@@ -118,7 +117,7 @@ public class TestFSVisitor {
      Path familyDir = new Path(regionDir, familyName);
      fs.mkdirs(familyDir);
      for (int h = 0; h < 5; ++h) {
-        String hfileName = UUID.randomUUID().toString().replaceAll("-", "");
+        String hfileName = TEST_UTIL.getRandomUUID().toString().replaceAll("-", "");
        tableHFiles.add(hfileName);
        fs.createNewFile(new Path(familyDir, hfileName));
      }


@@ -27,6 +27,7 @@ import java.util.List;
import java.util.SortedSet;
import java.util.UUID;
import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.ClassRule;
@@ -46,7 +47,7 @@ public class TestRegionSplitCalculator {
      HBaseClassTestRule.forClass(TestRegionSplitCalculator.class);

  private static final Logger LOG = LoggerFactory.getLogger(TestRegionSplitCalculator.class);

+  public static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();

  /**
   * This is range uses a user specified start and end keys. It also has an
   * extra tiebreaker so that different ranges with the same start/end key pair
@@ -59,7 +60,7 @@ public class TestRegionSplitCalculator {
    SimpleRange(byte[] start, byte[] end) {
      this.start = start;
      this.end = end;
-      this.tiebreaker = UUID.randomUUID();
+      this.tiebreaker = TEST_UTIL.getRandomUUID();
    }

    @Override


@@ -19,7 +19,6 @@ package org.apache.hadoop.hbase;

import java.io.File;
import java.io.IOException;
-import java.util.UUID;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
@@ -76,7 +75,7 @@ public class HBaseZKTestingUtility extends HBaseCommonTestingUtility {
    // Using randomUUID ensures that multiple clusters can be launched by
    // a same test, if it stops & starts them
-    Path testDir = getDataTestDir("cluster_" + UUID.randomUUID().toString());
+    Path testDir = getDataTestDir("cluster_" + getRandomUUID().toString());
    clusterTestDir = new File(testDir.toString()).getAbsoluteFile();

    // Have it cleaned up on exit
    boolean b = deleteOnExit();
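Aside, a sanity check on the comment above: uniqueness of the cluster_* directories now rests on 128 ThreadLocalRandom bits rather than SecureRandom output. Assuming the generator behaves as a uniform source, the birthday bound gives, for n generated names,

\[ P_{\text{collision}} \approx 1 - e^{-n(n-1)/2^{129}} \approx \frac{n^2}{2^{129}}, \qquad n = 10^6 \Rightarrow P \approx 1.5 \times 10^{-27}. \]

So accidental name clashes remain effectively impossible; what the change gives up is only unpredictability of the names, which these tests never relied on.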