HADOOP-17653. Do not use guava's Files.createTempDir(). (#2945)
Reviewed-by: Steve Loughran <stevel@apache.org>
Signed-off-by: Akira Ajisaka <aajisaka@apache.org>
(cherry picked from commit f1e1809029)
parent 9aa6106689
commit 670205c541
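For context, the change replaces guava's Files.createTempDir() (deprecated in recent Guava releases) with the JDK's java.nio.file.Files.createTempDirectory(). The sketch below is not part of the commit; the class name and prefix are illustrative, but it shows the replacement pattern the non-test code paths use:

import java.io.File;
import java.io.IOException;
import java.nio.file.Files;

public class TempDirMigrationSketch {
  public static void main(String[] args) throws IOException {
    // Old pattern removed by this change (guava, deprecated):
    //   File tempDir = com.google.common.io.Files.createTempDir();

    // New pattern: create the directory under an explicit base with a
    // caller-chosen prefix; failures surface as IOException.
    File tempDirBase = new File(System.getProperty("java.io.tmpdir"));
    File tempDir = Files.createTempDirectory(
        tempDirBase.toPath(), System.currentTimeMillis() + "-").toFile();
    System.out.println("created " + tempDir.getAbsolutePath());
  }
}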
@@ -88,7 +88,18 @@ public class StateStoreFileImpl extends StateStoreFileBaseImpl {
     if (this.rootDirectory == null) {
       String dir = getConf().get(FEDERATION_STORE_FILE_DIRECTORY);
       if (dir == null) {
-        File tempDir = Files.createTempDir();
+        File tempDirBase =
+            new File(System.getProperty("java.io.tmpdir"));
+        File tempDir = null;
+        try {
+          tempDir = java.nio.file.Files.createTempDirectory(
+              tempDirBase.toPath(), System.currentTimeMillis() + "-").toFile();
+        } catch (IOException e) {
+          // fallback to the base upon exception.
+          LOG.debug("Unable to create a temporary directory. Fall back to " +
+              " the default system temp directory {}", tempDirBase, e);
+          tempDir = tempDirBase;
+        }
         dir = tempDir.getAbsolutePath();
         LOG.warn("The root directory is not available, using {}", dir);
       }
@@ -16,8 +16,6 @@
  */
 package org.apache.hadoop.hdfs.server.common.blockaliasmap.impl;
 
-import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
-import org.apache.hadoop.thirdparty.com.google.common.io.Files;
 import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
@@ -28,7 +26,9 @@ import org.apache.hadoop.hdfs.server.aliasmap.InMemoryAliasMap;
 import org.apache.hadoop.hdfs.server.aliasmap.InMemoryLevelDBAliasMapServer;
 import org.apache.hadoop.hdfs.server.common.blockaliasmap.BlockAliasMap;
 import org.apache.hadoop.hdfs.server.common.FileRegion;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.test.LambdaTestUtils;
+import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Rule;
@@ -44,6 +44,7 @@ import static org.junit.Assert.fail;
 
 import java.io.File;
 import java.io.IOException;
+import java.nio.file.Files;
 import java.util.List;
 import java.util.Optional;
 import java.util.Random;
@@ -74,7 +75,9 @@ public class TestInMemoryLevelDBAliasMapClient {
 
     conf.set(DFSConfigKeys.DFS_PROVIDED_ALIASMAP_INMEMORY_RPC_ADDRESS,
         "localhost:" + port);
-    tempDir = Files.createTempDir();
+    File testDir = GenericTestUtils.getTestDir();
+    tempDir = Files
+        .createTempDirectory(testDir.toPath(), "test").toFile();
     File levelDBDir = new File(tempDir, BPID);
     levelDBDir.mkdirs();
     conf.set(DFSConfigKeys.DFS_PROVIDED_ALIASMAP_INMEMORY_LEVELDB_DIR,
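The test changes all follow the same shape: create the scratch directory with NIO under the Hadoop test directory instead of guava. A minimal, self-contained sketch of that pattern follows; the class and method names are illustrative, and it assumes the hadoop-common test utilities and the commons-io FileUtils the tests already import are on the classpath:

import java.io.File;
import java.io.IOException;
import java.nio.file.Files;

import org.apache.commons.io.FileUtils;
import org.apache.hadoop.test.GenericTestUtils;

public class TestTempDirSketch {
  private File tempDir;

  public void setUp() throws IOException {
    // Scratch space under the test build directory, with a "test" prefix,
    // mirroring the updated setUp() methods in this change.
    File testDir = GenericTestUtils.getTestDir();
    tempDir = Files.createTempDirectory(testDir.toPath(), "test").toFile();
  }

  public void tearDown() throws IOException {
    // Remove the scratch directory; commons-io handles non-empty trees.
    FileUtils.deleteDirectory(tempDir);
  }
}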
@@ -16,7 +16,6 @@
  */
 package org.apache.hadoop.hdfs.server.common.blockaliasmap.impl;
 
-import org.apache.hadoop.thirdparty.com.google.common.io.Files;
 import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
@@ -26,12 +25,14 @@ import org.apache.hadoop.hdfs.protocol.ProvidedStorageLocation;
 import org.apache.hadoop.hdfs.server.aliasmap.InMemoryAliasMap;
 import org.apache.hadoop.hdfs.server.aliasmap.InMemoryLevelDBAliasMapServer;
 import org.apache.hadoop.hdfs.server.common.FileRegion;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.iq80.leveldb.DBException;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
 import java.io.File;
 import java.io.IOException;
+import java.nio.file.Files;
 
 import static org.assertj.core.api.AssertionsForClassTypes.assertThatExceptionOfType;
 import static org.mockito.Mockito.doThrow;
@@ -60,7 +61,9 @@ public class TestLevelDbMockAliasMapClient {
 
     conf.set(DFSConfigKeys.DFS_PROVIDED_ALIASMAP_INMEMORY_RPC_ADDRESS,
         "localhost:" + port);
-    tempDir = Files.createTempDir();
+    File testDir = GenericTestUtils.getTestDir();
+    tempDir = Files
+        .createTempDirectory(testDir.toPath(), "test").toFile();
     conf.set(DFSConfigKeys.DFS_PROVIDED_ALIASMAP_INMEMORY_LEVELDB_DIR,
         tempDir.getAbsolutePath());
     levelDBAliasMapServer.setConf(conf);
@@ -27,7 +27,6 @@ import org.apache.commons.lang3.StringUtils;
 import org.apache.curator.framework.CuratorFramework;
 import org.apache.curator.framework.CuratorFrameworkFactory;
 import org.apache.curator.retry.RetryNTimes;
-import org.apache.curator.shaded.com.google.common.io.Files;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -117,6 +116,7 @@ import java.net.URI;
 import java.net.URISyntaxException;
 import java.nio.ByteBuffer;
 import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
 import java.text.MessageFormat;
 import java.util.*;
 import java.util.concurrent.ConcurrentHashMap;
@@ -1140,7 +1140,10 @@ public class ServiceClient extends AppAdminClient implements SliderExitCodes,
       return;
     }
     String buffer = ServiceApiUtil.jsonSerDeser.toJson(app);
-    File tmpDir = Files.createTempDir();
+    File testDir =
+        new File(System.getProperty("java.io.tmpdir"));
+    File tmpDir = Files.createTempDirectory(
+        testDir.toPath(), System.currentTimeMillis() + "-").toFile();
     if (tmpDir.exists()) {
       String serviceJsonPath = tmpDir.getAbsolutePath() + "/app.json";
       File localFile = new File(serviceJsonPath);
@@ -20,7 +20,7 @@ package org.apache.hadoop.yarn.csi.client;
 
 import csi.v0.Csi;
 import org.apache.commons.io.FileUtils;
-import org.apache.hadoop.thirdparty.com.google.common.io.Files;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.junit.AfterClass;
 import org.junit.Assert;
 import org.junit.Assume;
@@ -30,6 +30,7 @@ import org.junit.Test;
 
 import java.io.File;
 import java.io.IOException;
+import java.nio.file.Files;
 
 /**
  * Test class for CSI client.
@@ -42,7 +43,9 @@ public class TestCsiClient {
 
   @BeforeClass
   public static void setUp() throws IOException {
-    testRoot = Files.createTempDir();
+    File testDir = GenericTestUtils.getTestDir();
+    testRoot = Files
+        .createTempDirectory(testDir.toPath(), "test").toFile();
     File socketPath = new File(testRoot, "csi.sock");
     FileUtils.forceMkdirParent(socketPath);
     domainSocket = "unix://" + socketPath.getAbsolutePath();
@@ -46,7 +46,7 @@ import java.util.function.Function;
 
 import org.apache.commons.compress.utils.Sets;
 import org.apache.commons.io.FileUtils;
-import org.apache.curator.shaded.com.google.common.collect.Lists;
+import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
 import org.apache.hadoop.util.Shell.CommandExecutor;
 import org.apache.hadoop.yarn.server.nodemanager.api.deviceplugin.Device;
 import org.apache.hadoop.yarn.server.nodemanager.containermanager.linux.resources.ResourceHandlerException;
@@ -22,7 +22,7 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.curator.shaded.com.google.common.collect.Lists;
+import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
 import org.apache.hadoop.fs.Path;
 import org.junit.Before;
 import org.junit.Test;
@@ -29,7 +29,7 @@ import java.util.List;
 import java.util.Map;
 import java.util.TreeSet;
 
-import org.apache.curator.shaded.com.google.common.base.Joiner;
+import org.apache.hadoop.thirdparty.com.google.common.base.Joiner;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.yarn.api.records.Resource;
 import org.apache.hadoop.yarn.api.records.ResourceInformation;