HADOOP-17653. Do not use guava's Files.createTempDir(). (#2945)

Reviewed-by: Steve Loughran <stevel@apache.org>
Signed-off-by: Akira Ajisaka <aajisaka@apache.org>
(cherry picked from commit f1e1809029)
Author: Wei-Chiu Chuang
Date: 2021-05-01 19:10:32 -07:00
Committer: Akira Ajisaka
Parent: 9aa6106689
Commit: 670205c541
8 changed files with 36 additions and 13 deletions
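
Background: guava deprecated Files.createTempDir() in 30.0 after CVE-2020-8908,
since the directories it creates can end up readable by other local users. The
JDK replacement, java.nio.file.Files.createTempDirectory(), throws a checked
IOException and on POSIX systems creates the directory accessible only to its
owner. A minimal before/after sketch of the pattern this commit applies (class
and method names here are illustrative, not part of the patch):

    import java.io.File;
    import java.io.IOException;
    import java.nio.file.Files;

    public class TempDirExample {

      // Before: guava's helper. No checked exception; permissions are
      // not restricted to the owner, which is what CVE-2020-8908 flags.
      //   File tempDir = com.google.common.io.Files.createTempDir();

      // After: the JDK factory used throughout this commit.
      static File createTempDir(String prefix) throws IOException {
        return Files.createTempDirectory(prefix).toFile();
      }

      public static void main(String[] args) throws IOException {
        File dir = createTempDir("hadoop-example-");
        System.out.println("Created " + dir.getAbsolutePath());
      }
    }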

View File

@@ -88,7 +88,18 @@ protected String getRootDir() {
     if (this.rootDirectory == null) {
       String dir = getConf().get(FEDERATION_STORE_FILE_DIRECTORY);
       if (dir == null) {
-        File tempDir = Files.createTempDir();
+        File tempDirBase =
+            new File(System.getProperty("java.io.tmpdir"));
+        File tempDir = null;
+        try {
+          tempDir = java.nio.file.Files.createTempDirectory(
+              tempDirBase.toPath(), System.currentTimeMillis() + "-").toFile();
+        } catch (IOException e) {
+          // fallback to the base upon exception.
+          LOG.debug("Unable to create a temporary directory. Fall back to " +
+              " the default system temp directory {}", tempDirBase, e);
+          tempDir = tempDirBase;
+        }
         dir = tempDir.getAbsolutePath();
         LOG.warn("The root directory is not available, using {}", dir);
       }
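
Unlike the guava helper, createTempDirectory() throws a checked IOException,
which is why the state-store change above wraps the call and degrades to the
raw java.io.tmpdir rather than failing. A standalone sketch of that fallback
shape (the class and method names are illustrative):

    import java.io.File;
    import java.io.IOException;
    import java.nio.file.Files;

    public final class TempDirWithFallback {

      /** Unique per-run directory if possible, shared temp root otherwise. */
      static File tempDirOrFallback() {
        File base = new File(System.getProperty("java.io.tmpdir"));
        try {
          // A timestamp prefix, as in the patch, keeps runs distinguishable.
          return Files.createTempDirectory(
              base.toPath(), System.currentTimeMillis() + "-").toFile();
        } catch (IOException e) {
          // Degrade to the shared base directory instead of aborting startup.
          return base;
        }
      }

      public static void main(String[] args) {
        System.out.println(tempDirOrFallback());
      }
    }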

View File

@@ -16,8 +16,6 @@
  */
 package org.apache.hadoop.hdfs.server.common.blockaliasmap.impl;
 
-import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
-import org.apache.hadoop.thirdparty.com.google.common.io.Files;
 import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
@@ -28,7 +26,9 @@
 import org.apache.hadoop.hdfs.server.aliasmap.InMemoryLevelDBAliasMapServer;
 import org.apache.hadoop.hdfs.server.common.blockaliasmap.BlockAliasMap;
 import org.apache.hadoop.hdfs.server.common.FileRegion;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.test.LambdaTestUtils;
+import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Rule;
@@ -44,6 +44,7 @@
 
 import java.io.File;
 import java.io.IOException;
+import java.nio.file.Files;
 import java.util.List;
 import java.util.Optional;
 import java.util.Random;
@@ -74,7 +75,9 @@ public void setUp() throws IOException {
 
     conf.set(DFSConfigKeys.DFS_PROVIDED_ALIASMAP_INMEMORY_RPC_ADDRESS,
         "localhost:" + port);
-    tempDir = Files.createTempDir();
+    File testDir = GenericTestUtils.getTestDir();
+    tempDir = Files
+        .createTempDirectory(testDir.toPath(), "test").toFile();
     File levelDBDir = new File(tempDir, BPID);
     levelDBDir.mkdirs();
     conf.set(DFSConfigKeys.DFS_PROVIDED_ALIASMAP_INMEMORY_LEVELDB_DIR,
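
The test-side pattern differs from the production code above: instead of
java.io.tmpdir, the tests root their temp directory in the build's test data
directory via GenericTestUtils.getTestDir() (by default target/test/data,
overridable through the test.build.data property), so artifacts stay inside
the build tree. A hedged sketch of the resulting JUnit fixture; the teardown
is added here for illustration and is not part of this hunk:

    import java.io.File;
    import java.io.IOException;
    import java.nio.file.Files;

    import org.apache.commons.io.FileUtils;
    import org.apache.hadoop.test.GenericTestUtils;
    import org.junit.After;
    import org.junit.Before;

    public class TempDirFixtureExample {
      private File tempDir;

      @Before
      public void setUp() throws IOException {
        // Resolve the test data directory and create a fresh subdirectory.
        File testDir = GenericTestUtils.getTestDir();
        tempDir = Files.createTempDirectory(testDir.toPath(), "test").toFile();
      }

      @After
      public void tearDown() throws IOException {
        // Illustrative cleanup; the patched tests keep their own teardown.
        FileUtils.deleteDirectory(tempDir);
      }
    }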

View File

@@ -16,7 +16,6 @@
  */
 package org.apache.hadoop.hdfs.server.common.blockaliasmap.impl;
 
-import org.apache.hadoop.thirdparty.com.google.common.io.Files;
 import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
@@ -26,12 +25,14 @@
 import org.apache.hadoop.hdfs.server.aliasmap.InMemoryAliasMap;
 import org.apache.hadoop.hdfs.server.aliasmap.InMemoryLevelDBAliasMapServer;
 import org.apache.hadoop.hdfs.server.common.FileRegion;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.iq80.leveldb.DBException;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
 import java.io.File;
 import java.io.IOException;
+import java.nio.file.Files;
 
 import static org.assertj.core.api.AssertionsForClassTypes.assertThatExceptionOfType;
 import static org.mockito.Mockito.doThrow;
@@ -60,7 +61,9 @@ public void setUp() throws IOException {
 
     conf.set(DFSConfigKeys.DFS_PROVIDED_ALIASMAP_INMEMORY_RPC_ADDRESS,
         "localhost:" + port);
-    tempDir = Files.createTempDir();
+    File testDir = GenericTestUtils.getTestDir();
+    tempDir = Files
+        .createTempDirectory(testDir.toPath(), "test").toFile();
     conf.set(DFSConfigKeys.DFS_PROVIDED_ALIASMAP_INMEMORY_LEVELDB_DIR,
         tempDir.getAbsolutePath());
     levelDBAliasMapServer.setConf(conf);

View File

@@ -27,7 +27,6 @@
 import org.apache.curator.framework.CuratorFramework;
 import org.apache.curator.framework.CuratorFrameworkFactory;
 import org.apache.curator.retry.RetryNTimes;
-import org.apache.curator.shaded.com.google.common.io.Files;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -117,6 +116,7 @@
 import java.net.URISyntaxException;
 import java.nio.ByteBuffer;
 import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
 import java.text.MessageFormat;
 import java.util.*;
 import java.util.concurrent.ConcurrentHashMap;
@@ -1140,7 +1140,10 @@ private void addYarnSysFs(Path path,
       return;
     }
     String buffer = ServiceApiUtil.jsonSerDeser.toJson(app);
-    File tmpDir = Files.createTempDir();
+    File testDir =
+        new File(System.getProperty("java.io.tmpdir"));
+    File tmpDir = Files.createTempDirectory(
+        testDir.toPath(), System.currentTimeMillis() + "-").toFile();
     if (tmpDir.exists()) {
       String serviceJsonPath = tmpDir.getAbsolutePath() + "/app.json";
       File localFile = new File(serviceJsonPath);

View File

@@ -20,7 +20,7 @@
 
 import csi.v0.Csi;
 import org.apache.commons.io.FileUtils;
-import org.apache.hadoop.thirdparty.com.google.common.io.Files;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.junit.AfterClass;
 import org.junit.Assert;
 import org.junit.Assume;
@@ -30,6 +30,7 @@
 
 import java.io.File;
 import java.io.IOException;
+import java.nio.file.Files;
 
 /**
  * Test class for CSI client.
@@ -42,7 +43,9 @@ public class TestCsiClient {
 
   @BeforeClass
   public static void setUp() throws IOException {
-    testRoot = Files.createTempDir();
+    File testDir = GenericTestUtils.getTestDir();
+    testRoot = Files
+        .createTempDirectory(testDir.toPath(), "test").toFile();
     File socketPath = new File(testRoot, "csi.sock");
     FileUtils.forceMkdirParent(socketPath);
     domainSocket = "unix://" + socketPath.getAbsolutePath();

View File

@@ -46,7 +46,7 @@
 
 import org.apache.commons.compress.utils.Sets;
 import org.apache.commons.io.FileUtils;
-import org.apache.curator.shaded.com.google.common.collect.Lists;
+import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
 import org.apache.hadoop.util.Shell.CommandExecutor;
 import org.apache.hadoop.yarn.server.nodemanager.api.deviceplugin.Device;
 import org.apache.hadoop.yarn.server.nodemanager.containermanager.linux.resources.ResourceHandlerException;

View File

@@ -22,7 +22,7 @@
 import java.util.List;
 import java.util.Map;
 
-import org.apache.curator.shaded.com.google.common.collect.Lists;
+import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
 import org.apache.hadoop.fs.Path;
 import org.junit.Before;
 import org.junit.Test;

View File

@@ -29,7 +29,7 @@
 import java.util.Map;
 import java.util.TreeSet;
 
-import org.apache.curator.shaded.com.google.common.base.Joiner;
+import org.apache.hadoop.thirdparty.com.google.common.base.Joiner;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.yarn.api.records.Resource;
 import org.apache.hadoop.yarn.api.records.ResourceInformation;