HADOOP-10392. Use FileSystem#makeQualified(Path) instead of Path#makeQualified(FileSystem) (ajisakaa via aw)
Commit: 4222c97108
Parent: d964062f66
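The patch swaps the deprecated Path#makeQualified(FileSystem) for FileSystem#makeQualified(Path), or for Path#makeQualified(URI, Path) where calling into the FileSystem is undesirable. A minimal sketch of the migration pattern is below; the class name and the relative path are illustrative only, not taken from the patch.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class MakeQualifiedMigration {
      public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.getLocal(new Configuration());
        Path p = new Path("build/test-data");   // illustrative relative path

        // Before: Path#makeQualified(FileSystem) is deprecated.
        Path oldStyle = p.makeQualified(fs);

        // After: ask the FileSystem to qualify the path ...
        Path newStyle = fs.makeQualified(p);

        // ... or qualify against an explicit URI and working directory,
        // as StreamJob and PseudoLocalFs do in this patch.
        Path viaUri = p.makeQualified(fs.getUri(), fs.getWorkingDirectory());

        // All three produce the same fully qualified path.
        System.out.println(oldStyle + " = " + newStyle + " = " + viaUri);
      }
    }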
@@ -295,8 +295,8 @@ public class FileUtil {
                               Path dst)
                               throws IOException {
     if (srcFS == dstFS) {
-      String srcq = src.makeQualified(srcFS).toString() + Path.SEPARATOR;
-      String dstq = dst.makeQualified(dstFS).toString() + Path.SEPARATOR;
+      String srcq = srcFS.makeQualified(src).toString() + Path.SEPARATOR;
+      String dstq = dstFS.makeQualified(dst).toString() + Path.SEPARATOR;
       if (dstq.startsWith(srcq)) {
         if (srcq.length() == dstq.length()) {
           throw new IOException("Cannot copy " + src + " to itself.");
@@ -505,7 +505,7 @@ public class FTPFileSystem extends FileSystem {
       long modTime = -1; // Modification time of root dir not known.
       Path root = new Path("/");
       return new FileStatus(length, isDir, blockReplication, blockSize,
-          modTime, root.makeQualified(this));
+          modTime, this.makeQualified(root));
     }
     String pathName = parentPath.toUri().getPath();
     FTPFile[] ftpFiles = client.listFiles(pathName);

@@ -546,7 +546,7 @@ public class FTPFileSystem extends FileSystem {
     String group = ftpFile.getGroup();
     Path filePath = new Path(parentPath, ftpFile.getName());
     return new FileStatus(length, isDir, blockReplication, blockSize, modTime,
-        accessTime, permission, user, group, filePath.makeQualified(this));
+        accessTime, permission, user, group, this.makeQualified(filePath));
   }

   @Override
@@ -1883,7 +1883,7 @@ public class SequenceFile {
     @Deprecated
     public Reader(FileSystem fs, Path file,
                   Configuration conf) throws IOException {
-      this(conf, file(file.makeQualified(fs)));
+      this(conf, file(fs.makeQualified(file)));
     }

     /**
@@ -218,8 +218,8 @@ public class TestLocalFileSystem {

   @Test
   public void testHomeDirectory() throws IOException {
-    Path home = new Path(System.getProperty("user.home"))
-      .makeQualified(fileSys);
+    Path home = fileSys.makeQualified(
+        new Path(System.getProperty("user.home")));
     Path fsHome = fileSys.getHomeDirectory();
     assertEquals(home, fsHome);
   }

@@ -229,7 +229,7 @@ public class TestLocalFileSystem {
     Path path = new Path(TEST_ROOT_DIR, "foo%bar");
     writeFile(fileSys, path, 1);
     FileStatus status = fileSys.getFileStatus(path);
-    assertEquals(path.makeQualified(fileSys), status.getPath());
+    assertEquals(fileSys.makeQualified(path), status.getPath());
     cleanupFile(fileSys, path);
   }

@@ -170,7 +170,7 @@ public class FileBench extends Configured implements Tool {
     for(int i = 0; i < argv.length; ++i) {
       try {
         if ("-dir".equals(argv[i])) {
-          root = new Path(argv[++i]).makeQualified(fs);
+          root = fs.makeQualified(new Path(argv[++i]));
           System.out.println("DIR: " + root.toString());
         } else if ("-seed".equals(argv[i])) {
           job.setLong("filebench.seed", Long.valueOf(argv[++i]));
@@ -50,8 +50,8 @@ public class MiniMRClientClusterFactory {

     FileSystem fs = FileSystem.get(conf);

-    Path testRootDir = new Path("target", identifier + "-tmpDir")
-        .makeQualified(fs);
+    Path testRootDir = fs.makeQualified(
+        new Path("target", identifier + "-tmpDir"));
     Path appJar = new Path(testRootDir, "MRAppJar.jar");

     // Copy MRAppJar and make it private.
@@ -47,9 +47,9 @@ public class TestCombineFileInputFormat {
       throw new RuntimeException("init failure", e);
     }
   }
-  private static Path workDir =
-    new Path(new Path(System.getProperty("test.build.data", "/tmp")),
-             "TestCombineFileInputFormat").makeQualified(localFs);
+  private static Path workDir = localFs.makeQualified(new Path(
+      System.getProperty("test.build.data", "/tmp"),
+      "TestCombineFileInputFormat"));

   private static void writeFile(FileSystem fs, Path name,
                                 String contents) throws IOException {
@@ -53,10 +53,9 @@ public class TestCombineSequenceFileInputFormat {
     }
   }

-  @SuppressWarnings("deprecation")
-  private static Path workDir =
-    new Path(new Path(System.getProperty("test.build.data", "/tmp")),
-             "TestCombineSequenceFileInputFormat").makeQualified(localFs);
+  private static Path workDir = localFs.makeQualified(new Path(
+      System.getProperty("test.build.data", "/tmp"),
+      "TestCombineSequenceFileInputFormat"));

   @Test(timeout=10000)
   public void testFormat() throws Exception {
@@ -60,10 +60,9 @@ public class TestCombineTextInputFormat {
     }
   }

-  @SuppressWarnings("deprecation")
-  private static Path workDir =
-    new Path(new Path(System.getProperty("test.build.data", "/tmp")),
-             "TestCombineTextInputFormat").makeQualified(localFs);
+  private static Path workDir = localFs.makeQualified(new Path(
+      System.getProperty("test.build.data", "/tmp"),
+      "TestCombineTextInputFormat"));

   // A reporter that does nothing
   private static final Reporter voidReporter = Reporter.NULL;
@@ -84,9 +84,9 @@ public class TestConcatenatedCompressedInput {
   public void after() {
     ZlibFactory.loadNativeZLib();
   }
-  private static Path workDir =
-    new Path(new Path(System.getProperty("test.build.data", "/tmp")),
-             "TestConcatenatedCompressedInput").makeQualified(localFs);
+  private static Path workDir = localFs.makeQualified(new Path(
+      System.getProperty("test.build.data", "/tmp"),
+      "TestConcatenatedCompressedInput"));

   private static LineReader makeStream(String str) throws IOException {
     return new LineReader(new ByteArrayInputStream(str.getBytes("UTF-8")),
@@ -342,8 +342,8 @@ public class TestMapRed extends Configured implements Tool {
       values.add(m);
       m = m.replace((char)('A' + i - 1), (char)('A' + i));
     }
-    Path testdir = new Path(
-        System.getProperty("test.build.data","/tmp")).makeQualified(fs);
+    Path testdir = fs.makeQualified(new Path(
+        System.getProperty("test.build.data","/tmp")));
     fs.delete(testdir, true);
     Path inFile = new Path(testdir, "nullin/blah");
     SequenceFile.Writer w = SequenceFile.createWriter(fs, conf, inFile,
@@ -75,8 +75,8 @@ public class TestMiniMRChildTask {
     }
   }

-  private static Path TEST_ROOT_DIR = new Path("target",
-      TestMiniMRChildTask.class.getName() + "-tmpDir").makeQualified(localFs);
+  private static Path TEST_ROOT_DIR = localFs.makeQualified(
+      new Path("target", TestMiniMRChildTask.class.getName() + "-tmpDir"));
   static Path APP_JAR = new Path(TEST_ROOT_DIR, "MRAppJar.jar");

   /**
@@ -61,10 +61,10 @@ public class TestTextInputFormat {
       throw new RuntimeException("init failure", e);
     }
   }
-  @SuppressWarnings("deprecation")
-  private static Path workDir =
-    new Path(new Path(System.getProperty("test.build.data", "/tmp")),
-             "TestTextInputFormat").makeQualified(localFs);
+
+  private static Path workDir = localFs.makeQualified(new Path(
+      System.getProperty("test.build.data", "/tmp"),
+      "TestTextInputFormat"));

   @Test (timeout=500000)
   public void testFormat() throws Exception {
@@ -50,8 +50,8 @@ public class TestWrappedRecordReaderClassloader {
     assertTrue(job.getClassLoader() instanceof Fake_ClassLoader);

     FileSystem fs = FileSystem.get(job);
-    Path testdir = new Path(System.getProperty("test.build.data", "/tmp"))
-        .makeQualified(fs);
+    Path testdir = fs.makeQualified(new Path(
+        System.getProperty("test.build.data", "/tmp")));

     Path base = new Path(testdir, "/empty");
     Path[] src = { new Path(base, "i0"), new Path("i1"), new Path("i2") };
@@ -50,8 +50,8 @@ public class TestWrappedRRClassloader {
     assertTrue(conf.getClassLoader() instanceof Fake_ClassLoader);

     FileSystem fs = FileSystem.get(conf);
-    Path testdir = new Path(System.getProperty("test.build.data", "/tmp"))
-        .makeQualified(fs);
+    Path testdir = fs.makeQualified(new Path(
+        System.getProperty("test.build.data", "/tmp")));

     Path base = new Path(testdir, "/empty");
     Path[] src = { new Path(base, "i0"), new Path("i1"), new Path("i2") };
@@ -330,7 +330,7 @@ public class MRAsyncDiskService {
    * Returns the normalized path of a path.
    */
   private String normalizePath(String path) {
-    return (new Path(path)).makeQualified(this.localFileSystem)
+    return this.localFileSystem.makeQualified(new Path(path))
         .toUri().getPath();
   }

@@ -128,8 +128,8 @@ public class TestMRJobs {
     }
   }

-  private static Path TEST_ROOT_DIR = new Path("target",
-      TestMRJobs.class.getName() + "-tmpDir").makeQualified(localFs);
+  private static Path TEST_ROOT_DIR = localFs.makeQualified(
+      new Path("target", TestMRJobs.class.getName() + "-tmpDir"));
   static Path APP_JAR = new Path(TEST_ROOT_DIR, "MRAppJar.jar");
   private static final String OUTPUT_ROOT_DIR = "/tmp/" +
       TestMRJobs.class.getSimpleName();
@@ -73,8 +73,8 @@ public class TestMRJobsWithHistoryService {
     }
   }

-  private static Path TEST_ROOT_DIR = new Path("target",
-      TestMRJobs.class.getName() + "-tmpDir").makeQualified(localFs);
+  private static Path TEST_ROOT_DIR = localFs.makeQualified(
+      new Path("target", TestMRJobs.class.getName() + "-tmpDir"));
   static Path APP_JAR = new Path(TEST_ROOT_DIR, "MRAppJar.jar");

   @Before
@@ -473,7 +473,7 @@ public class HadoopArchives implements Tool {
     conf.setLong(HAR_BLOCKSIZE_LABEL, blockSize);
     conf.setLong(HAR_PARTSIZE_LABEL, partSize);
     conf.set(DST_HAR_LABEL, archiveName);
-    conf.set(SRC_PARENT_LABEL, parentPath.makeQualified(fs).toString());
+    conf.set(SRC_PARENT_LABEL, fs.makeQualified(parentPath).toString());
     conf.setInt(HAR_REPLICATION_LABEL, repl);
     Path outputPath = new Path(dest, archiveName);
     FileOutputFormat.setOutputPath(conf, outputPath);
@@ -447,7 +447,7 @@ public class Gridmix extends Configured implements Tool {

     // Create <ioPath> with 777 permissions
     final FileSystem inputFs = ioPath.getFileSystem(conf);
-    ioPath = ioPath.makeQualified(inputFs);
+    ioPath = inputFs.makeQualified(ioPath);
     boolean succeeded = false;
     try {
       succeeded = FileSystem.mkdirs(inputFs, ioPath,
@@ -116,7 +116,7 @@ class PseudoLocalFs extends FileSystem {
    * @throws FileNotFoundException
    */
   long validateFileNameFormat(Path path) throws FileNotFoundException {
-    path = path.makeQualified(this);
+    path = this.makeQualified(path);
     boolean valid = true;
     long fileSize = 0;
     if (!path.toUri().getScheme().equals(getUri().getScheme())) {

@@ -329,4 +329,10 @@ class PseudoLocalFs extends FileSystem {
     throw new UnsupportedOperationException("SetWorkingDirectory "
         + "is not supported in pseudo local file system.");
   }
+
+  @Override
+  public Path makeQualified(Path path) {
+    // skip FileSystem#checkPath() to validate some other Filesystems
+    return path.makeQualified(this.getUri(), this.getWorkingDirectory());
+  }
 }
@@ -48,8 +48,8 @@ public class TestFilePool {
     try {
       final Configuration conf = new Configuration();
       final FileSystem fs = FileSystem.getLocal(conf).getRaw();
-      return new Path(System.getProperty("test.build.data", "/tmp"),
-          "testFilePool").makeQualified(fs);
+      return fs.makeQualified(new Path(
+          System.getProperty("test.build.data", "/tmp"), "testFilePool"));
     } catch (IOException e) {
       fail();
     }
@@ -48,8 +48,8 @@ public class TestFileQueue {
   public static void setup() throws IOException {
     final Configuration conf = new Configuration();
     final FileSystem fs = FileSystem.getLocal(conf).getRaw();
-    final Path p = new Path(System.getProperty("test.build.data", "/tmp"),
-        "testFileQueue").makeQualified(fs);
+    final Path p = fs.makeQualified(new Path(
+        System.getProperty("test.build.data", "/tmp"), "testFileQueue"));
     fs.delete(p, true);
     final byte[] b = new byte[BLOCK];
     for (int i = 0; i < NFILES; ++i) {

@@ -71,8 +71,8 @@ public class TestFileQueue {
   public static void cleanup() throws IOException {
     final Configuration conf = new Configuration();
     final FileSystem fs = FileSystem.getLocal(conf).getRaw();
-    final Path p = new Path(System.getProperty("test.build.data", "/tmp"),
-        "testFileQueue").makeQualified(fs);
+    final Path p = fs.makeQualified(new Path(
+        System.getProperty("test.build.data", "/tmp"), "testFileQueue"));
     fs.delete(p, true);
   }

@@ -224,7 +224,7 @@ public class TestPseudoLocalFs {

     // Validate operations on valid qualified path
     path = new Path("myPsedoFile.1237");
-    path = path.makeQualified(pfs);
+    path = pfs.makeQualified(path);
     validateGetFileStatus(pfs, path, true);
     validateCreate(pfs, path, true);
     validateOpen(pfs, path, true);
@@ -40,8 +40,8 @@ public class TestUserResolve {
   public static void createRootDir() throws IOException {
     conf = new Configuration();
     fs = FileSystem.getLocal(conf);
-    rootDir = new Path(new Path(System.getProperty("test.build.data", "/tmp"))
-        .makeQualified(fs), "gridmixUserResolve");
+    rootDir = new Path(fs.makeQualified(new Path(
+        System.getProperty("test.build.data", "/tmp"))), "gridmixUserResolve");
   }

   /**
@@ -278,7 +278,7 @@ public class SwiftTestUtils extends org.junit.Assert {
     noteAction(action);
     try {
       if (fileSystem != null) {
-        fileSystem.delete(new Path(cleanupPath).makeQualified(fileSystem),
+        fileSystem.delete(fileSystem.makeQualified(new Path(cleanupPath)),
             true);
       }
     } catch (Exception e) {
@@ -159,7 +159,7 @@ public class SwiftFileSystemBaseTest extends Assert implements
    * @return a qualified path instance
    */
   protected Path path(String pathString) {
-    return new Path(pathString).makeQualified(fs);
+    return fs.makeQualified(new Path(pathString));
   }

   /**
@@ -126,7 +126,7 @@ public class TestSwiftFileSystemPartitionedUploads extends
     SwiftTestUtils.compareByteArrays(src, dest, len);
     FileStatus status;

-    final Path qualifiedPath = path.makeQualified(fs);
+    final Path qualifiedPath = fs.makeQualified(path);
     status = fs.getFileStatus(qualifiedPath);
     //now see what block location info comes back.
     //This will vary depending on the Swift version, so the results

@@ -216,7 +216,7 @@ public class TestSwiftFileSystemPartitionedUploads extends

   private FileStatus validatePathLen(Path path, int len) throws IOException {
     //verify that the length is what was written in a direct status check
-    final Path qualifiedPath = path.makeQualified(fs);
+    final Path qualifiedPath = fs.makeQualified(path);
     FileStatus[] parentDirListing = fs.listStatus(qualifiedPath.getParent());
     StringBuilder listing = lsToString(parentDirListing);
     String parentDirLS = listing.toString();
@@ -57,8 +57,8 @@ public class TestHistograms {
   public void testHistograms() throws IOException {
     final Configuration conf = new Configuration();
     final FileSystem lfs = FileSystem.getLocal(conf);
-    final Path rootInputDir = new Path(
-        System.getProperty("test.tools.input.dir", "")).makeQualified(lfs);
+    final Path rootInputDir = lfs.makeQualified(new Path(
+        System.getProperty("test.tools.input.dir", "target/input")));
     final Path rootInputFile = new Path(rootInputDir, "rumen/histogram-tests");


@@ -132,7 +132,7 @@ public class TestHistograms {
     final FileSystem lfs = FileSystem.getLocal(conf);

     for (String arg : args) {
-      Path filePath = new Path(arg).makeQualified(lfs);
+      Path filePath = lfs.makeQualified(new Path(arg));
       String fileName = filePath.getName();
       if (fileName.startsWith("input")) {
         LoggedDiscreteCDF newResult = histogramFileToCDF(filePath, lfs);
@@ -22,13 +22,11 @@ import java.io.File;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.net.URLEncoder;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 import java.util.Map;
 import java.util.regex.Pattern;
 import java.util.TreeMap;
 import java.util.TreeSet;

@@ -41,12 +39,12 @@ import org.apache.commons.cli.Options;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.mapreduce.MRConfig;
 import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.filecache.DistributedCache;
 import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.FileAlreadyExistsException;
 import org.apache.hadoop.mapred.FileInputFormat;

@@ -56,7 +54,6 @@ import org.apache.hadoop.mapred.JobClient;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.JobID;
 import org.apache.hadoop.mapred.KeyValueTextInputFormat;
 import org.apache.hadoop.mapred.OutputFormat;
 import org.apache.hadoop.mapred.RunningJob;
 import org.apache.hadoop.mapred.SequenceFileAsTextInputFormat;
 import org.apache.hadoop.mapred.SequenceFileInputFormat;

@@ -65,6 +62,7 @@ import org.apache.hadoop.mapred.TextOutputFormat;
 import org.apache.hadoop.mapred.lib.LazyOutputFormat;
 import org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorCombiner;
 import org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorReducer;
+import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.streaming.io.IdentifierResolver;
 import org.apache.hadoop.streaming.io.InputWriter;
 import org.apache.hadoop.streaming.io.OutputReader;
@@ -297,7 +295,10 @@ public class StreamJob implements Tool {
       try {
         Path path = new Path(file);
         FileSystem localFs = FileSystem.getLocal(config_);
-        String finalPath = path.makeQualified(localFs).toString();
+        Path qualifiedPath = path.makeQualified(
+            localFs.getUri(), localFs.getWorkingDirectory());
+        validate(qualifiedPath);
+        String finalPath = qualifiedPath.toString();
         if(fileList.length() > 0) {
           fileList.append(',');
         }

@@ -313,7 +314,6 @@ public class StreamJob implements Tool {
         tmpFiles = tmpFiles + "," + fileList;
       }
       config_.set("tmpfiles", tmpFiles);
-      validate(packageFiles_);
     }

     String fsName = cmdLine.getOptionValue("dfs");

@@ -391,14 +391,13 @@ public class StreamJob implements Tool {
     return OptionBuilder.withDescription(desc).create(name);
   }

-  private void validate(final List<String> values)
-  throws IllegalArgumentException {
-    for (String file : values) {
-      File f = new File(file);
-      if (!FileUtil.canRead(f)) {
-        fail("File: " + f.getAbsolutePath()
-          + " does not exist, or is not readable.");
-      }
+  private void validate(final Path path) throws IOException {
+    try {
+      path.getFileSystem(config_).access(path, FsAction.READ);
+    } catch (FileNotFoundException e) {
+      fail("File: " + path + " does not exist.");
+    } catch (AccessControlException e) {
+      fail("File: " + path + " is not readable.");
     }
   }
