HBASE-19164: Remove UUID.randomUUID in tests.

Signed-off-by: Mike Drob <mdrob@apache.org>
Author: Sahil Aggarwal, 2018-06-27 14:04:59 +05:30; committed by Mike Drob
parent a68dbde941
commit 952bb96c8a
33 changed files with 79 additions and 88 deletions
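
Background for this change: UUID.randomUUID() draws every UUID from a single shared SecureRandom instance, which is comparatively slow and serializes concurrent callers, while packing two ThreadLocalRandom longs into a UUID is cheap and lock-free. A rough standalone timing sketch of the gap (not part of this commit; the class name and iteration count are invented for illustration):

    import java.util.UUID;
    import java.util.concurrent.ThreadLocalRandom;

    // Crude single-threaded comparison; absolute numbers vary by JVM and
    // entropy source, but randomUUID() is typically several times slower.
    public class UuidCostSketch {
      public static void main(String[] args) {
        final int n = 100_000;
        long sink = 0; // consume results so the JIT cannot drop the loops
        long t0 = System.nanoTime();
        for (int i = 0; i < n; i++) {
          sink ^= UUID.randomUUID().getLeastSignificantBits(); // SecureRandom-backed
        }
        long t1 = System.nanoTime();
        for (int i = 0; i < n; i++) {
          sink ^= new UUID(ThreadLocalRandom.current().nextLong(),
              ThreadLocalRandom.current().nextLong()).getLeastSignificantBits();
        }
        long t2 = System.nanoTime();
        System.out.printf("randomUUID: %d ms, packed longs: %d ms (sink=%d)%n",
            (t1 - t0) / 1_000_000, (t2 - t1) / 1_000_000, sink);
      }
    }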


@@ -23,6 +23,7 @@ import java.io.IOException;
 import java.util.Arrays;
 import java.util.List;
 import java.util.UUID;
+import java.util.concurrent.ThreadLocalRandom;
 import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.conf.Configuration;
@@ -144,9 +145,15 @@ public class HBaseCommonTestingUtility {
    * @see #getBaseTestDir()
    */
   public Path getRandomDir() {
-    return new Path(getBaseTestDir(), UUID.randomUUID().toString());
+    return new Path(getBaseTestDir(), getRandomUUID().toString());
   }
+
+  public UUID getRandomUUID() {
+    return new UUID(ThreadLocalRandom.current().nextLong(),
+                    ThreadLocalRandom.current().nextLong());
+  }
+
   protected void createSubDir(String propertyName, Path parent, String subDirName) {
     Path newPath = new Path(parent, subDirName);
     File newDir = new File(newPath.toString()).getAbsoluteFile();
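
One caveat with the getRandomUUID() helper added above: UUID.randomUUID() always returns a version 4, IETF-variant UUID, whereas new UUID(long, long) uses the two longs verbatim, so the version and variant bits are left to chance. For the test directory names, row keys, and snapshot names touched by this commit only uniqueness matters, but the two generators are not interchangeable anywhere a well-formed v4 UUID is asserted. A minimal standalone sketch of the difference (class name is invented for illustration):

    import java.util.UUID;
    import java.util.concurrent.ThreadLocalRandom;

    // Contrast of the generator being replaced and its replacement.
    public class UuidBitsSketch {
      public static void main(String[] args) {
        UUID secure = UUID.randomUUID(); // always version() == 4, variant() == 2
        UUID cheap = new UUID(ThreadLocalRandom.current().nextLong(),
            ThreadLocalRandom.current().nextLong()); // bits are whatever the PRNG produced
        System.out.println("randomUUID: version=" + secure.version() + " variant=" + secure.variant());
        System.out.println("packed:     version=" + cheap.version() + " variant=" + cheap.variant());
      }
    }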


@@ -26,7 +26,6 @@ import java.util.HashSet;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Set;
-import java.util.UUID;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
@@ -85,7 +84,7 @@ public class TestReplicationSyncUpToolWithBulkLoadedData extends TestReplication
     Iterator<String> randomHFileRangeListIterator = null;
     Set<String> randomHFileRanges = new HashSet<>(16);
     for (int i = 0; i < 16; i++) {
-      randomHFileRanges.add(UUID.randomUUID().toString());
+      randomHFileRanges.add(utility1.getRandomUUID().toString());
     }
     List<String> randomHFileRangeList = new ArrayList<>(randomHFileRanges);
     Collections.sort(randomHFileRangeList);


@@ -20,13 +20,15 @@ package org.apache.hadoop.hbase;
 import java.util.UUID;
+import java.util.concurrent.ThreadLocalRandom;
 public class RandomStringGeneratorImpl implements RandomStringGenerator {
   private final String s;
   public RandomStringGeneratorImpl() {
-    s = UUID.randomUUID().toString();
+    s = new UUID(ThreadLocalRandom.current().nextLong(),
+      ThreadLocalRandom.current().nextLong()).toString();
   }
   @Override


@@ -264,7 +264,7 @@ public class IntegrationTestReplication extends IntegrationTestBigLinkedList {
    */
   protected void runGenerator() throws Exception {
     Path outputPath = new Path(outputDir);
-    UUID uuid = UUID.randomUUID(); //create a random UUID.
+    UUID uuid = util.getRandomUUID(); //create a random UUID.
     Path generatorOutput = new Path(outputPath, uuid.toString());
     Generator generator = new Generator();
@@ -288,7 +288,7 @@ public class IntegrationTestReplication extends IntegrationTestBigLinkedList {
    */
   protected void runVerify(long expectedNumNodes) throws Exception {
     Path outputPath = new Path(outputDir);
-    UUID uuid = UUID.randomUUID(); //create a random UUID.
+    UUID uuid = util.getRandomUUID(); //create a random UUID.
     Path iterationOutput = new Path(outputPath, uuid.toString());
     Verify verify = new Verify();


@@ -25,7 +25,6 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 import java.util.Optional;
-import java.util.UUID;
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
@@ -123,7 +122,7 @@ public class TestImportTSVWithOperationAttributes implements Configurable {
   @Test
   public void testMROnTable() throws Exception {
-    final TableName tableName = TableName.valueOf(name.getMethodName() + UUID.randomUUID());
+    final TableName tableName = TableName.valueOf(name.getMethodName() + util.getRandomUUID());
     // Prepare the arguments required for the test.
     String[] args = new String[] {
@@ -139,7 +138,7 @@ public class TestImportTSVWithOperationAttributes implements Configurable {
   @Test
   public void testMROnTableWithInvalidOperationAttr() throws Exception {
-    final TableName tableName = TableName.valueOf(name.getMethodName() + UUID.randomUUID());
+    final TableName tableName = TableName.valueOf(name.getMethodName() + util.getRandomUUID());
     // Prepare the arguments required for the test.
     String[] args = new String[] {


@@ -24,7 +24,6 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 import java.util.Optional;
-import java.util.UUID;
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
@@ -111,7 +110,7 @@ public class TestImportTSVWithTTLs implements Configurable {
   @Test
   public void testMROnTable() throws Exception {
-    final TableName tableName = TableName.valueOf(name.getMethodName() + UUID.randomUUID());
+    final TableName tableName = TableName.valueOf(name.getMethodName() + util.getRandomUUID());
     // Prepare the arguments required for the test.
     String[] args = new String[] {


@@ -27,7 +27,6 @@ import java.util.Arrays;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
-import java.util.UUID;
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
@@ -161,7 +160,7 @@ public class TestImportTSVWithVisibilityLabels implements Configurable {
   @Test
   public void testMROnTable() throws Exception {
-    final TableName tableName = TableName.valueOf(name.getMethodName() + UUID.randomUUID());
+    final TableName tableName = TableName.valueOf(name.getMethodName() + util.getRandomUUID());
     // Prepare the arguments required for the test.
     String[] args = new String[] {
@@ -177,7 +176,7 @@ public class TestImportTSVWithVisibilityLabels implements Configurable {
   @Test
   public void testMROnTableWithDeletes() throws Exception {
-    final TableName tableName = TableName.valueOf(name.getMethodName() + UUID.randomUUID());
+    final TableName tableName = TableName.valueOf(name.getMethodName() + util.getRandomUUID());
     // Prepare the arguments required for the test.
     String[] args = new String[] {
@@ -229,7 +228,7 @@ public class TestImportTSVWithVisibilityLabels implements Configurable {
   @Test
   public void testMROnTableWithBulkload() throws Exception {
-    final TableName tableName = TableName.valueOf(name.getMethodName() + UUID.randomUUID());
+    final TableName tableName = TableName.valueOf(name.getMethodName() + util.getRandomUUID());
     Path hfiles = new Path(util.getDataTestDirOnTestFS(tableName.getNameAsString()), "hfiles");
     // Prepare the arguments required for the test.
     String[] args = new String[] {
@@ -245,7 +244,7 @@ public class TestImportTSVWithVisibilityLabels implements Configurable {
   @Test
   public void testBulkOutputWithTsvImporterTextMapper() throws Exception {
-    final TableName table = TableName.valueOf(name.getMethodName() + UUID.randomUUID());
+    final TableName table = TableName.valueOf(name.getMethodName() + util.getRandomUUID());
     String FAMILY = "FAM";
     Path bulkOutputPath = new Path(util.getDataTestDirOnTestFS(table.getNameAsString()),"hfiles");
     // Prepare the arguments required for the test.
@@ -266,7 +265,7 @@ public class TestImportTSVWithVisibilityLabels implements Configurable {
   @Test
   public void testMRWithOutputFormat() throws Exception {
-    final TableName tableName = TableName.valueOf(name.getMethodName() + UUID.randomUUID());
+    final TableName tableName = TableName.valueOf(name.getMethodName() + util.getRandomUUID());
     Path hfiles = new Path(util.getDataTestDirOnTestFS(tableName.getNameAsString()), "hfiles");
     // Prepare the arguments required for the test.
     String[] args = new String[] {
@@ -283,7 +282,7 @@ public class TestImportTSVWithVisibilityLabels implements Configurable {
   @Test
   public void testBulkOutputWithInvalidLabels() throws Exception {
-    final TableName tableName = TableName.valueOf(name.getMethodName() + UUID.randomUUID());
+    final TableName tableName = TableName.valueOf(name.getMethodName() + util.getRandomUUID());
     Path hfiles = new Path(util.getDataTestDirOnTestFS(tableName.getNameAsString()), "hfiles");
     // Prepare the arguments required for the test.
     String[] args =
@@ -301,7 +300,7 @@ public class TestImportTSVWithVisibilityLabels implements Configurable {
   @Test
   public void testBulkOutputWithTsvImporterTextMapperWithInvalidLabels() throws Exception {
-    final TableName tableName = TableName.valueOf(name.getMethodName() + UUID.randomUUID());
+    final TableName tableName = TableName.valueOf(name.getMethodName() + util.getRandomUUID());
     Path hfiles = new Path(util.getDataTestDirOnTestFS(tableName.getNameAsString()), "hfiles");
     // Prepare the arguments required for the test.
     String[] args =


@@ -29,7 +29,6 @@ import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
-import java.util.UUID;
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
@@ -118,7 +117,7 @@ public class TestImportTsv implements Configurable {
   @Before
   public void setup() throws Exception {
-    tn = TableName.valueOf("test-" + UUID.randomUUID());
+    tn = TableName.valueOf("test-" + util.getRandomUUID());
     args = new HashMap<>();
     // Prepare the arguments required for the test.
     args.put(ImportTsv.COLUMNS_CONF_KEY, "HBASE_ROW_KEY,FAM:A,FAM:B");


@@ -49,7 +49,6 @@ import java.util.Properties;
 import java.util.Random;
 import java.util.Set;
 import java.util.TreeSet;
-import java.util.UUID;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicReference;
 import java.util.stream.Collectors;
@@ -551,7 +550,7 @@ public class HBaseTestingUtility extends HBaseZKTestingUtility {
     //the working directory, and create a unique sub dir there
     FileSystem fs = getTestFileSystem();
     Path newDataTestDir;
-    String randomStr = UUID.randomUUID().toString();
+    String randomStr = getRandomUUID().toString();
     if (fs.getUri().getScheme().equals(FileSystem.getLocal(conf).getUri().getScheme())) {
       newDataTestDir = new Path(getDataTestDir(), randomStr);
       File dataTestDir = new File(newDataTestDir.toString());


@@ -20,7 +20,6 @@ package org.apache.hadoop.hbase;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
-import java.util.UUID;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.client.Put;
@@ -68,7 +67,7 @@ public class TestHBaseOnOtherDfsCluster {
     targetFs = FileSystem.get(util2.getConfiguration());
     assertFsSameUri(fs, targetFs);
-    Path randomFile = new Path("/"+UUID.randomUUID());
+    Path randomFile = new Path("/"+util1.getRandomUUID());
     assertTrue(targetFs.createNewFile(randomFile));
     assertTrue(fs.exists(randomFile));


@@ -25,7 +25,6 @@ import java.io.File;
 import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.PrintWriter;
-import java.util.UUID;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -144,7 +143,7 @@ public class TestNodeHealthCheckChore {
         throw new IOException("Failed mkdirs " + tempDir);
       }
     }
-    String scriptName = "HealthScript" + UUID.randomUUID().toString()
+    String scriptName = "HealthScript" + UTIL.getRandomUUID().toString()
       + (Shell.WINDOWS ? ".cmd" : ".sh");
     healthScriptFile = new File(tempDir.getAbsolutePath(), scriptName);
     conf.set(HConstants.HEALTH_SCRIPT_LOC, healthScriptFile.getAbsolutePath());


@@ -38,7 +38,6 @@ import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
 import java.util.NavigableMap;
-import java.util.UUID;
 import java.util.concurrent.Callable;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
@@ -357,9 +356,9 @@ public class TestFromClientSide {
     Table ht = TEST_UTIL.createTable(tableName, FAMILIES);
     String value = "this is the value";
     String value2 = "this is some other value";
-    String keyPrefix1 = UUID.randomUUID().toString();
-    String keyPrefix2 = UUID.randomUUID().toString();
-    String keyPrefix3 = UUID.randomUUID().toString();
+    String keyPrefix1 = TEST_UTIL.getRandomUUID().toString();
+    String keyPrefix2 = TEST_UTIL.getRandomUUID().toString();
+    String keyPrefix3 = TEST_UTIL.getRandomUUID().toString();
     putRows(ht, 3, value, keyPrefix1);
     putRows(ht, 3, value, keyPrefix2);
     putRows(ht, 3, value, keyPrefix3);
@@ -449,7 +448,7 @@ public class TestFromClientSide {
   private void putRows(Table ht, int numRows, String value, String key)
       throws IOException {
     for (int i = 0; i < numRows; i++) {
-      String row = key + "_" + UUID.randomUUID().toString();
+      String row = key + "_" + TEST_UTIL.getRandomUUID().toString();
       System.out.println(String.format("Saving row: %s, with value %s", row,
         value));
       Put put = new Put(Bytes.toBytes(row));


@@ -18,7 +18,6 @@
 package org.apache.hadoop.hbase.client;
 import java.io.IOException;
-import java.util.UUID;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Coprocessor;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
@@ -52,7 +51,7 @@ public class TestSnapshotWithAcl extends SecureTestUtil {
   public static final HBaseClassTestRule CLASS_RULE =
       HBaseClassTestRule.forClass(TestSnapshotWithAcl.class);
-  public TableName TEST_TABLE = TableName.valueOf(UUID.randomUUID().toString());
+  public TableName TEST_TABLE = TableName.valueOf(TEST_UTIL.getRandomUUID().toString());
   private static final int ROW_COUNT = 30000;
@@ -197,11 +196,11 @@ public class TestSnapshotWithAcl extends SecureTestUtil {
     loadData();
     verifyRows(TEST_TABLE);
-    String snapshotName1 = UUID.randomUUID().toString();
+    String snapshotName1 = TEST_UTIL.getRandomUUID().toString();
     admin.snapshot(snapshotName1, TEST_TABLE);
     // clone snapshot with restoreAcl true.
-    TableName tableName1 = TableName.valueOf(UUID.randomUUID().toString());
+    TableName tableName1 = TableName.valueOf(TEST_UTIL.getRandomUUID().toString());
     admin.cloneSnapshot(snapshotName1, tableName1, true);
     verifyRows(tableName1);
     verifyAllowed(new AccessReadAction(tableName1), USER_OWNER, USER_RO, USER_RW);
@@ -210,7 +209,7 @@ public class TestSnapshotWithAcl extends SecureTestUtil {
     verifyDenied(new AccessWriteAction(tableName1), USER_RO, USER_NONE);
     // clone snapshot with restoreAcl false.
-    TableName tableName2 = TableName.valueOf(UUID.randomUUID().toString());
+    TableName tableName2 = TableName.valueOf(TEST_UTIL.getRandomUUID().toString());
     admin.cloneSnapshot(snapshotName1, tableName2, false);
     verifyRows(tableName2);
     verifyAllowed(new AccessReadAction(tableName2), USER_OWNER);


@@ -299,7 +299,7 @@ public class TestReplicationAdminWithClusters extends TestReplicationBase {
     @Override
     public UUID getPeerUUID() {
-      return UUID.randomUUID();
+      return utility1.getRandomUUID();
     }
     @Override


@@ -28,7 +28,6 @@ import java.io.DataOutputStream;
 import java.io.IOException;
 import java.security.SecureRandom;
 import java.util.List;
-import java.util.UUID;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
@@ -221,7 +220,8 @@ public class TestHFileEncryption {
         .build();
       // write a new test HFile
       LOG.info("Writing with " + fileContext);
-      Path path = new Path(TEST_UTIL.getDataTestDir(), UUID.randomUUID().toString() + ".hfile");
+      Path path = new Path(TEST_UTIL.getDataTestDir(),
+        TEST_UTIL.getRandomUUID().toString() + ".hfile");
       FSDataOutputStream out = fs.create(path);
       HFile.Writer writer = HFile.getWriterFactory(conf, cacheConf)
         .withOutputStream(out)


@@ -36,7 +36,6 @@ import static org.junit.Assert.assertTrue;
 import java.io.IOException;
 import java.util.Map;
-import java.util.UUID;
 import java.util.concurrent.atomic.LongAdder;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
@@ -124,7 +123,7 @@ public class TestSplitLogManager {
     conf = TEST_UTIL.getConfiguration();
     // Use a different ZK wrapper instance for each tests.
     zkw =
-      new ZKWatcher(conf, "split-log-manager-tests" + UUID.randomUUID().toString(), null);
+      new ZKWatcher(conf, "split-log-manager-tests" + TEST_UTIL.getRandomUUID().toString(), null);
     master = new DummyMasterServices(zkw, conf);
     ZKUtil.deleteChildrenRecursively(zkw, zkw.getZNodePaths().baseZNode);
@@ -523,7 +522,7 @@ public class TestSplitLogManager {
     Path logDirPath = new Path(new Path(dir, HConstants.HREGION_LOGDIR_NAME), serverName);
     fs.mkdirs(logDirPath);
     // create an empty log file
-    String logFile = new Path(logDirPath, UUID.randomUUID().toString()).toString();
+    String logFile = new Path(logDirPath, TEST_UTIL.getRandomUUID().toString()).toString();
     fs.create(new Path(logDirPath, logFile)).close();
     // spin up a thread mocking split done.


@@ -29,7 +29,6 @@ import java.util.Arrays;
 import java.util.Collection;
 import java.util.HashSet;
 import java.util.List;
-import java.util.UUID;
 import java.util.concurrent.atomic.AtomicInteger;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
@@ -179,7 +178,7 @@ public class TestSnapshotFileCache {
     // add a random file to make sure we refresh
-    FileStatus randomFile = mockStoreFile(UUID.randomUUID().toString());
+    FileStatus randomFile = mockStoreFile(UTIL.getRandomUUID().toString());
     allStoreFiles.add(randomFile);
     deletableFiles = cache.getUnreferencedFiles(allStoreFiles, null);
     assertEquals(randomFile, Iterables.getOnlyElement(deletableFiles));


@@ -24,8 +24,8 @@ import static org.junit.Assert.assertTrue;
 import java.util.Date;
 import java.util.Random;
-import java.util.UUID;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.MD5Hash;
@@ -41,6 +41,8 @@ public class TestMobFileName {
   public static final HBaseClassTestRule CLASS_RULE =
       HBaseClassTestRule.forClass(TestMobFileName.class);
+  private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
+
   private String uuid;
   private Date date;
   private String dateStr;
@@ -49,7 +51,7 @@ public class TestMobFileName {
   @Before
   public void setUp() {
     Random random = new Random();
-    uuid = UUID.randomUUID().toString().replaceAll("-", "");
+    uuid = TEST_UTIL.getRandomUUID().toString().replaceAll("-", "");
     date = new Date();
     dateStr = MobUtils.formatDate(date);
     startKey = Bytes.toBytes(random.nextInt());


@@ -31,7 +31,6 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Random;
-import java.util.UUID;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.RejectedExecutionException;
 import java.util.concurrent.RejectedExecutionHandler;
@@ -136,8 +135,8 @@ public class TestPartitionedMobCompactor {
     Path testDir = FSUtils.getRootDir(conf);
     Path mobTestDir = new Path(testDir, MobConstants.MOB_DIR_NAME);
     basePath = new Path(new Path(mobTestDir, tableName), family);
-    mobSuffix = UUID.randomUUID().toString().replaceAll("-", "");
-    delSuffix = UUID.randomUUID().toString().replaceAll("-", "") + "_del";
+    mobSuffix = TEST_UTIL.getRandomUUID().toString().replaceAll("-", "");
+    delSuffix = TEST_UTIL.getRandomUUID().toString().replaceAll("-", "") + "_del";
     allFiles.clear();
     mobFiles.clear();
     delFiles.clear();
@@ -832,8 +831,8 @@ public class TestPartitionedMobCompactor {
       if (sameStartKey) {
         // When creating multiple files under one partition, suffix needs to be different.
         startRow = Bytes.toBytes(startKey);
-        mobSuffix = UUID.randomUUID().toString().replaceAll("-", "");
-        delSuffix = UUID.randomUUID().toString().replaceAll("-", "") + "_del";
+        mobSuffix = TEST_UTIL.getRandomUUID().toString().replaceAll("-", "");
+        delSuffix = TEST_UTIL.getRandomUUID().toString().replaceAll("-", "") + "_del";
       } else {
         startRow = Bytes.toBytes(startKey + i);
       }


@@ -20,7 +20,6 @@ package org.apache.hadoop.hbase.regionserver;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotNull;
-import java.util.UUID;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
@@ -103,7 +102,7 @@ public class TestClusterId {
     FSDataOutputStream s = null;
     try {
       s = fs.create(filePath);
-      s.writeUTF(UUID.randomUUID().toString());
+      s.writeUTF(TEST_UTIL.getRandomUUID().toString());
     } finally {
       if (s != null) {
         s.close();


@@ -52,7 +52,6 @@ import java.util.Map;
 import java.util.NavigableMap;
 import java.util.Objects;
 import java.util.TreeMap;
-import java.util.UUID;
 import java.util.concurrent.Callable;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.ExecutorService;
@@ -4694,7 +4693,7 @@ public class TestHRegion {
     // XXX: The spied AsyncFSWAL can not work properly because of a Mockito defect that can not
     // deal with classes which have a field of an inner class. See discussions in HBASE-15536.
     walConf.set(WALFactory.WAL_PROVIDER, "filesystem");
-    final WALFactory wals = new WALFactory(walConf, UUID.randomUUID().toString());
+    final WALFactory wals = new WALFactory(walConf, TEST_UTIL.getRandomUUID().toString());
     final WAL wal = spy(wals.getWAL(RegionInfoBuilder.newBuilder(tableName).build()));
     this.region = initHRegion(tableName, HConstants.EMPTY_START_ROW,
       HConstants.EMPTY_END_ROW, false, tableDurability, wal,


@@ -41,7 +41,6 @@ import java.util.List;
 import java.util.Map;
 import java.util.Objects;
 import java.util.Random;
-import java.util.UUID;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.Path;
@@ -1643,7 +1642,7 @@ public class TestHRegionReplayEvents {
       byte[] valueBytes) throws IOException {
     HFile.WriterFactory hFileFactory = HFile.getWriterFactoryNoCache(TEST_UTIL.getConfiguration());
     // TODO We need a way to do this without creating files
-    Path testFile = new Path(testPath, UUID.randomUUID().toString());
+    Path testFile = new Path(testPath, TEST_UTIL.getRandomUUID().toString());
     FSDataOutputStream out = TEST_UTIL.getTestFileSystem().create(testFile);
     try {
       hFileFactory.withOutputStream(out);


@@ -408,7 +408,7 @@ public abstract class AbstractTestFSWAL {
     }
     // Add any old cluster id.
     List<UUID> clusterIds = new ArrayList<>(1);
-    clusterIds.add(UUID.randomUUID());
+    clusterIds.add(TEST_UTIL.getRandomUUID());
     // Now make appends run slow.
     goslow.set(true);
     for (int i = 0; i < countPerFamily; i++) {


@@ -72,7 +72,7 @@ public class SerialReplicationTestBase {
   public static final class LocalReplicationEndpoint extends BaseReplicationEndpoint {
-    private static final UUID PEER_UUID = UUID.randomUUID();
+    private static final UUID PEER_UUID = UTIL.getRandomUUID();
     @Override
     public UUID getPeerUUID() {


@@ -401,7 +401,7 @@ public class TestReplicationEndpoint extends TestReplicationBase {
   }
   public static class ReplicationEndpointForTest extends BaseReplicationEndpoint {
-    static UUID uuid = UUID.randomUUID();
+    static UUID uuid = utility1.getRandomUUID();
     static AtomicInteger contructedCount = new AtomicInteger();
     static AtomicInteger startedCount = new AtomicInteger();
     static AtomicInteger stoppedCount = new AtomicInteger();


@@ -82,7 +82,7 @@ public class TestRaceWhenCreatingReplicationSource {
   public static final class LocalReplicationEndpoint extends BaseReplicationEndpoint {
-    private static final UUID PEER_UUID = UUID.randomUUID();
+    private static final UUID PEER_UUID = UTIL.getRandomUUID();
     @Override
     public UUID getPeerUUID() {


@@ -24,7 +24,6 @@ import static org.junit.Assert.assertTrue;
 import java.security.PrivilegedExceptionAction;
 import java.util.ArrayList;
 import java.util.List;
-import java.util.UUID;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -132,7 +131,7 @@ public class TestAccessControlFilter extends SecureTestUtil {
       public Object run() throws Exception {
        Configuration conf = new Configuration(TEST_UTIL.getConfiguration());
        // force a new RS connection
-        conf.set("testkey", UUID.randomUUID().toString());
+        conf.set("testkey", TEST_UTIL.getRandomUUID().toString());
        Connection connection = ConnectionFactory.createConnection(conf);
        Table t = connection.getTable(TABLE);
        try {
@@ -161,7 +160,7 @@ public class TestAccessControlFilter extends SecureTestUtil {
      public Object run() throws Exception {
        Configuration conf = new Configuration(TEST_UTIL.getConfiguration());
        // force a new RS connection
-        conf.set("testkey", UUID.randomUUID().toString());
+        conf.set("testkey", TEST_UTIL.getRandomUUID().toString());
        Connection connection = ConnectionFactory.createConnection(conf);
        Table t = connection.getTable(TABLE);
        try {
@@ -189,7 +188,7 @@ public class TestAccessControlFilter extends SecureTestUtil {
      public Object run() throws Exception {
        Configuration conf = new Configuration(TEST_UTIL.getConfiguration());
        // force a new RS connection
-        conf.set("testkey", UUID.randomUUID().toString());
+        conf.set("testkey", TEST_UTIL.getRandomUUID().toString());
        Connection connection = ConnectionFactory.createConnection(conf);
        Table t = connection.getTable(TABLE);
        try {


@@ -21,7 +21,6 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
-import java.util.UUID;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Coprocessor;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
@@ -157,7 +156,7 @@ public class TestScanEarlyTermination extends SecureTestUtil {
      @Override
      public Object run() throws Exception {
        // force a new RS connection
-        conf.set("testkey", UUID.randomUUID().toString());
+        conf.set("testkey", TEST_UTIL.getRandomUUID().toString());
        Connection connection = ConnectionFactory.createConnection(conf);
        Table t = connection.getTable(TEST_TABLE.getTableName());
        try {
@@ -184,7 +183,7 @@ public class TestScanEarlyTermination extends SecureTestUtil {
      @Override
      public Object run() throws Exception {
        // force a new RS connection
-        conf.set("testkey", UUID.randomUUID().toString());
+        conf.set("testkey", TEST_UTIL.getRandomUUID().toString());
        Connection connection = ConnectionFactory.createConnection(conf);
        Table t = connection.getTable(TEST_TABLE.getTableName());
        try {
@@ -210,7 +209,7 @@ public class TestScanEarlyTermination extends SecureTestUtil {
      @Override
      public Object run() throws Exception {
        // force a new RS connection
-        conf.set("testkey", UUID.randomUUID().toString());
+        conf.set("testkey", TEST_UTIL.getRandomUUID().toString());
        Connection connection = ConnectionFactory.createConnection(conf);
        Table t = connection.getTable(TEST_TABLE.getTableName());
        try {
@@ -234,7 +233,7 @@ public class TestScanEarlyTermination extends SecureTestUtil {
      @Override
      public Object run() throws Exception {
        // force a new RS connection
-        conf.set("testkey", UUID.randomUUID().toString());
+        conf.set("testkey", TEST_UTIL.getRandomUUID().toString());
        Connection connection = ConnectionFactory.createConnection(conf);
        Table t = connection.getTable(TEST_TABLE.getTableName());
        try {
@@ -262,7 +261,7 @@ public class TestScanEarlyTermination extends SecureTestUtil {
      @Override
      public Object run() throws Exception {
        // force a new RS connection
-        conf.set("testkey", UUID.randomUUID().toString());
+        conf.set("testkey", TEST_UTIL.getRandomUUID().toString());
        Connection connection = ConnectionFactory.createConnection(conf);
        Table t = connection.getTable(TEST_TABLE.getTableName());
        try {


@@ -29,7 +29,6 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Optional;
-import java.util.UUID;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.ScheduledThreadPoolExecutor;
@@ -415,7 +414,8 @@ public class BaseTestHBaseFsck {
     MobFileName mobFileName = MobFileName.create(oldFileName);
     String startKey = mobFileName.getStartKey();
     String date = mobFileName.getDate();
-    return MobFileName.create(startKey, date, UUID.randomUUID().toString().replaceAll("-", ""))
+    return MobFileName.create(startKey, date,
+        TEST_UTIL.getRandomUUID().toString().replaceAll("-", ""))
         .getFileName();
   }


@@ -28,7 +28,6 @@ import java.io.File;
 import java.io.IOException;
 import java.util.List;
 import java.util.Random;
-import java.util.UUID;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
@@ -249,7 +248,7 @@ public class TestFSUtils {
     assertEquals(new FsPermission("700"), filePerm);
     // then that the correct file is created
-    Path p = new Path("target" + File.separator + UUID.randomUUID().toString());
+    Path p = new Path("target" + File.separator + htu.getRandomUUID().toString());
     try {
       FSDataOutputStream out = FSUtils.create(conf, fs, p, filePerm, null);
       out.close();
@@ -268,7 +267,7 @@ public class TestFSUtils {
     conf.setBoolean(HConstants.ENABLE_DATA_FILE_UMASK, true);
     FsPermission perms = FSUtils.getFilePermissions(fs, conf, HConstants.DATA_FILE_UMASK_KEY);
     // then that the correct file is created
-    String file = UUID.randomUUID().toString();
+    String file = htu.getRandomUUID().toString();
     Path p = new Path(htu.getDataTestDir(), "temptarget" + File.separator + file);
     Path p1 = new Path(htu.getDataTestDir(), "temppath" + File.separator + file);
     try {
@@ -309,7 +308,7 @@ public class TestFSUtils {
     FileSystem fs = FileSystem.get(conf);
     Path testDir = htu.getDataTestDirOnTestFS("testArchiveFile");
-    String file = UUID.randomUUID().toString();
+    String file = htu.getRandomUUID().toString();
     Path p = new Path(testDir, file);
     FSDataOutputStream out = fs.create(p);
@@ -323,7 +322,7 @@ public class TestFSUtils {
     mockEnv.setValue(expect);
     EnvironmentEdgeManager.injectEdge(mockEnv);
     try {
-      String dstFile = UUID.randomUUID().toString();
+      String dstFile = htu.getRandomUUID().toString();
       Path dst = new Path(testDir , dstFile);
       assertTrue(FSUtils.renameAndSetModifyTime(fs, p, dst));
@@ -369,7 +368,7 @@ public class TestFSUtils {
     FSUtils.setStoragePolicy(fs, conf, testDir, HConstants.WAL_STORAGE_POLICY,
       HConstants.DEFAULT_WAL_STORAGE_POLICY);
-    String file = UUID.randomUUID().toString();
+    String file = htu.getRandomUUID().toString();
     Path p = new Path(testDir, file);
     WriteDataToHDFS(fs, p, 4096);
     // will assert existance before deleting.


@@ -22,7 +22,6 @@ import static org.junit.Assert.assertEquals;
 import java.io.IOException;
 import java.util.HashSet;
 import java.util.Set;
-import java.util.UUID;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
@@ -118,9 +117,9 @@ public class TestFSVisitor {
         Path familyDir = new Path(regionDir, familyName);
         fs.mkdirs(familyDir);
         for (int h = 0; h < 5; ++h) {
-          String hfileName = UUID.randomUUID().toString().replaceAll("-", "");
-          tableHFiles.add(hfileName);
-          fs.createNewFile(new Path(familyDir, hfileName));
+          String hfileName = TEST_UTIL.getRandomUUID().toString().replaceAll("-", "");
+          tableHFiles.add(hfileName);
+          fs.createNewFile(new Path(familyDir, hfileName));
         }
       }
     }


@@ -27,6 +27,7 @@ import java.util.List;
 import java.util.SortedSet;
 import java.util.UUID;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.testclassification.MiscTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.junit.ClassRule;
@@ -46,7 +47,7 @@ public class TestRegionSplitCalculator {
       HBaseClassTestRule.forClass(TestRegionSplitCalculator.class);
   private static final Logger LOG = LoggerFactory.getLogger(TestRegionSplitCalculator.class);
+  public static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
   /**
    * This is range uses a user specified start and end keys. It also has an
    * extra tiebreaker so that different ranges with the same start/end key pair
@@ -59,7 +60,7 @@ public class TestRegionSplitCalculator {
     SimpleRange(byte[] start, byte[] end) {
       this.start = start;
       this.end = end;
-      this.tiebreaker = UUID.randomUUID();
+      this.tiebreaker = TEST_UTIL.getRandomUUID();
     }
     @Override


@@ -19,7 +19,6 @@ package org.apache.hadoop.hbase;
 import java.io.File;
 import java.io.IOException;
-import java.util.UUID;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
@@ -76,7 +75,7 @@ public class HBaseZKTestingUtility extends HBaseCommonTestingUtility {
     // Using randomUUID ensures that multiple clusters can be launched by
     // a same test, if it stops & starts them
-    Path testDir = getDataTestDir("cluster_" + UUID.randomUUID().toString());
+    Path testDir = getDataTestDir("cluster_" + getRandomUUID().toString());
     clusterTestDir = new File(testDir.toString()).getAbsoluteFile();
     // Have it cleaned up on exit
     boolean b = deleteOnExit();