HADOOP-6271. Add recursive and non recursive create and mkdir to FileContext. Contributed by Sanjay Radia.
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@816818 13f79535-47bb-0310-9956-ffa450edef68
Parent: 1695ecd1a3
Commit: fa48d9ea17
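For illustration, here is a minimal sketch of how the API added by this commit might be called from client code; the calls mirror the tests added below, while the /tmp paths and the class name are made up for the example.

import java.util.EnumSet;

import org.apache.hadoop.fs.CreateFlag;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.Options.CreateOpts;
import org.apache.hadoop.fs.Path;

public class FileContextCreateMkdirSketch {
  public static void main(String[] args) throws Exception {
    FileContext fc = FileContext.getLocalFSFileContext();

    // Recursive mkdir: missing parents are created ('mkdir -p' semantics).
    fc.mkdir(new Path("/tmp/fcSketch/a/b"), FileContext.DEFAULT_PERM, true);

    // Non-recursive mkdir: the parent must already exist,
    // otherwise an IOException is thrown.
    fc.mkdir(new Path("/tmp/fcSketch/a/b/c"), FileContext.DEFAULT_PERM, false);

    // Non-recursive create is the default; CreateOpts.createParent() asks
    // create() to make any missing parent directories first.
    FSDataOutputStream out = fc.create(new Path("/tmp/fcSketch/x/y/file"),
        EnumSet.of(CreateFlag.CREATE), CreateOpts.createParent());
    out.writeBytes("hello");
    out.close();
  }
}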
@@ -595,6 +595,9 @@ Trunk (unreleased changes)
    HADOOP-6166. Further improve the performance of the pure-Java CRC32
    implementation. (Tsz Wo (Nicholas), SZE via cdouglas)

    HADOOP-6271. Add recursive and non recursive create and mkdir to
    FileContext. (Sanjay Radia via suresh)

  BUG FIXES

    HADOOP-5379. CBZip2InputStream to throw IOException on data crc error.

@@ -467,6 +467,9 @@ public final class FileContext {
   * Permission - umask is applied against permission:
   *          default FsPermission.getDefault()
   *          @see #setPermission(Path, FsPermission)
   * CreateParent - create missing parent path
   *          default is to not create parents
   *
   * The defaults for the following are SS defaults of the
   * file server implementing the target path.
   * Not all parameters make sense for all kinds of file systems
@@ -475,9 +478,11 @@ public final class FileContext {
   * Blocksize - block size for file blocks
   * ReplicationFactor - replication for blocks
   * BytesPerChecksum - bytes per checksum
   *
   *
   * @throws IOException
   */
  @SuppressWarnings("deprecation") // call to primitiveCreate
  public FSDataOutputStream create(final Path f,
      final EnumSet<CreateFlag> createFlag,
      CreateOpts... opts)
@@ -485,99 +490,57 @@ public final class FileContext {
    Path absF = fixRelativePart(f);
    FileSystem fsOfAbsF = getFSofPath(absF);

    int bufferSize = -1;
    short replication = -1;
    long blockSize = -1;
    int bytesPerChecksum = -1;
    // If one of the options is a permission, extract it & apply umask
    // If not, add a default Perms and apply umask;
    // FileSystem#create

    FsPermission permission = null;
    Progressable progress = null;

    for (CreateOpts iOpt : opts) {
      if (CreateOpts.BlockSize.class.isInstance(iOpt)) {
        if (blockSize != -1) {
          throw new IllegalArgumentException("multiple varargs of same kind");

    if (opts != null) {
      for (int i = 0; i < opts.length; ++i) {
        if (opts[i] instanceof CreateOpts.Perms) {
          if (permission != null)
            throw new IllegalArgumentException("multiple permissions varargs");
          permission = ((CreateOpts.Perms) opts[i]).getValue();
          opts[i] = CreateOpts.perms(permission.applyUMask(umask));
        }
        blockSize = ((CreateOpts.BlockSize) iOpt).getValue();
      } else if (CreateOpts.BufferSize.class.isInstance(iOpt)) {
        if (bufferSize != -1) {
          throw new IllegalArgumentException("multiple varargs of same kind");
        }
        bufferSize = ((CreateOpts.BufferSize) iOpt).getValue();
      } else if (CreateOpts.ReplicationFactor.class.isInstance(iOpt)) {
        if (replication != -1) {
          throw new IllegalArgumentException("multiple varargs of same kind");
        }
        replication = ((CreateOpts.ReplicationFactor) iOpt).getValue();
      } else if (CreateOpts.BytesPerChecksum.class.isInstance(iOpt)) {
        if (bytesPerChecksum != -1) {
          throw new IllegalArgumentException("multiple varargs of same kind");
        }
        bytesPerChecksum = ((CreateOpts.BytesPerChecksum) iOpt).getValue();
      } else if (CreateOpts.Perms.class.isInstance(iOpt)) {
        if (permission != null) {
          throw new IllegalArgumentException("multiple varargs of same kind");
        }
        permission = ((CreateOpts.Perms) iOpt).getValue();
      } else if (CreateOpts.Progress.class.isInstance(iOpt)) {
        if (progress != null) {
          throw new IllegalArgumentException("multiple varargs of same kind");
        }
        progress = ((CreateOpts.Progress) iOpt).getValue();
      } else {
        throw new IllegalArgumentException("Unkown CreateOpts of type " +
            iOpt.getClass().getName());
      }
    }
    if (blockSize % bytesPerChecksum != 0) {
      throw new IllegalArgumentException(
          "blockSize should be a multiple of checksumsize");
    }

    FsServerDefaults ssDef = fsOfAbsF.getServerDefaults();

    if (blockSize == -1) {
      blockSize = ssDef.getBlockSize();
    }
    if (bufferSize == -1) {
      bufferSize = ssDef.getFileBufferSize();
    }
    if (replication == -1) {
      replication = ssDef.getReplication();
    }
    if (bytesPerChecksum == -1) {
      bytesPerChecksum = ssDef.getBytesPerChecksum();
    }
    if (permission == null) {
      permission = FsPermission.getDefault();
    }

    FsPermission absPerms = (permission == null ?
        FsPermission.getDefault() : permission).applyUMask(umask);

    return fsOfAbsF.primitiveCreate(absF, absPerms, createFlag,
        bufferSize, replication, blockSize, progress, bytesPerChecksum);
    CreateOpts[] theOpts = opts;
    if (permission == null) { // no permission was set
      CreateOpts[] newOpts = new CreateOpts[opts.length + 1];
      System.arraycopy(opts, 0, newOpts, 0, opts.length);
      newOpts[opts.length] =
          CreateOpts.perms(FsPermission.getDefault().applyUMask(umask));
      theOpts = newOpts;
    }
    return fsOfAbsF.primitiveCreate(absF, createFlag, theOpts);
  }

  /**
   * Make the given file and all non-existent parents into
   * directories. Has the semantics of Unix 'mkdir -p'.
   * Existence of the directory hierarchy is not an error.
   * directories.
   *
   * @param dir - the dir to make
   * @param permission - permissions is set permission&~umask
   * @return true if the operation succeeds; false if dir already exists
   * @throws IOException when operation fails (e.g. permissions) etc.
   * @param createParent - if true then missing parent dirs are created
   *                       if false then parent must exist
   * @throws IOException when operation fails not authorized or
   *         if parent does not exist and createParent is false.
   */
  public boolean mkdirs(final Path dir, final FsPermission permission)
  @SuppressWarnings("deprecation") // call to primitiveMkdir
  public void mkdir(final Path dir, final FsPermission permission,
      final boolean createParent)
    throws IOException {
    Path absDir = fixRelativePart(dir);
    FsPermission absFerms = (permission == null ?
        FsPermission.getDefault() : permission).applyUMask(umask);
    return getFSofPath(absDir).primitiveMkdir(absDir, absFerms);
    getFSofPath(absDir).primitiveMkdir(absDir, absFerms, createParent);
  }

  /** Delete a file.
   *
  /**
   * Delete a file.
   * @param f the path to delete.
   * @param recursive if path is a directory and set to
   * true, the directory is deleted else throws an exception. In
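To make the umask handling above concrete, here is a small sketch of the permission arithmetic that create() performs before delegating to primitiveCreate(); the octal values are illustrative assumptions, not values fixed by this commit.

import org.apache.hadoop.fs.permission.FsPermission;

public class UmaskSketch {
  public static void main(String[] args) {
    // Assume the FileContext was configured with a umask of 022 and the
    // caller passed CreateOpts.perms(new FsPermission((short) 0777)).
    FsPermission umask = new FsPermission((short) 0022);
    FsPermission requested = new FsPermission((short) 0777);

    // Same call the create() path above makes: requested & ~umask.
    FsPermission effective = requested.applyUMask(umask);
    System.out.println(effective);  // prints rwxr-xr-x, i.e. 0755
  }
}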
@@ -1196,10 +1159,8 @@ public final class FileContext {
        checkDest(qSrc.getName(), qDst, false);
        if (isDirectory(qSrc)) {
          checkDependencies(qSrc, qDst);
          if (!mkdirs(qDst, FsPermission.getDefault())) {
            throw new IOException("Failed to create destination directory `" +
                qDst + "'");
          }
          mkdir(qDst, FsPermission.getDefault(), true);

          FileStatus[] contents = FileContext.this.listStatus(qSrc);
          for (FileStatus content : contents) {
            copy(content.getPath(), new Path(qDst, content.getPath()),

@@ -41,6 +41,7 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Options.CreateOpts;
import org.apache.hadoop.fs.Options.Rename;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.io.MultipleIOException;
@@ -589,10 +590,9 @@ public abstract class FileSystem extends Configured implements Closeable {
      EnumSet<CreateFlag> flag, int bufferSize, short replication, long blockSize,
      Progressable progress) throws IOException ;

  /*
   * This version of the create method assumes that the permission
   * of create does not matter.
   * It has been added to support the FileContext that processes the permission
  /*.
   * This create has been added to support the FileContext that processes
   * the permission
   * with umask before calling this method.
   * This is a temporary method added to support the transition from FileSystem
   * to FileContext for user applications.
@@ -612,10 +612,115 @@ public abstract class FileSystem extends Configured implements Closeable {
        blockSize, progress);
  }

  /*.
   * This create has been added to support the FileContext that passes
   * an absolute permission (i.e. umask was already applied)
   * This is a temporary method added to support the transition from FileSystem
   * to FileContext for user applications.
   */
  @Deprecated
  protected FSDataOutputStream primitiveCreate(final Path f,
      final EnumSet<CreateFlag> createFlag,
      CreateOpts... opts) throws IOException {
    checkPath(f);
    int bufferSize = -1;
    short replication = -1;
    long blockSize = -1;
    int bytesPerChecksum = -1;
    FsPermission permission = null;
    Progressable progress = null;
    Boolean createParent = null;

    for (CreateOpts iOpt : opts) {
      if (CreateOpts.BlockSize.class.isInstance(iOpt)) {
        if (blockSize != -1) {
          throw new IllegalArgumentException("multiple varargs of same kind");
        }
        blockSize = ((CreateOpts.BlockSize) iOpt).getValue();
      } else if (CreateOpts.BufferSize.class.isInstance(iOpt)) {
        if (bufferSize != -1) {
          throw new IllegalArgumentException("multiple varargs of same kind");
        }
        bufferSize = ((CreateOpts.BufferSize) iOpt).getValue();
      } else if (CreateOpts.ReplicationFactor.class.isInstance(iOpt)) {
        if (replication != -1) {
          throw new IllegalArgumentException("multiple varargs of same kind");
        }
        replication = ((CreateOpts.ReplicationFactor) iOpt).getValue();
      } else if (CreateOpts.BytesPerChecksum.class.isInstance(iOpt)) {
        if (bytesPerChecksum != -1) {
          throw new IllegalArgumentException("multiple varargs of same kind");
        }
        bytesPerChecksum = ((CreateOpts.BytesPerChecksum) iOpt).getValue();
      } else if (CreateOpts.Perms.class.isInstance(iOpt)) {
        if (permission != null) {
          throw new IllegalArgumentException("multiple varargs of same kind");
        }
        permission = ((CreateOpts.Perms) iOpt).getValue();
      } else if (CreateOpts.Progress.class.isInstance(iOpt)) {
        if (progress != null) {
          throw new IllegalArgumentException("multiple varargs of same kind");
        }
        progress = ((CreateOpts.Progress) iOpt).getValue();
      } else if (CreateOpts.CreateParent.class.isInstance(iOpt)) {
        if (createParent != null) {
          throw new IllegalArgumentException("multiple varargs of same kind");
        }
        createParent = ((CreateOpts.CreateParent) iOpt).getValue();
      } else {
        throw new IllegalArgumentException("Unkown CreateOpts of type " +
            iOpt.getClass().getName());
      }
    }
    if (blockSize % bytesPerChecksum != 0) {
      throw new IllegalArgumentException(
          "blockSize should be a multiple of checksumsize");
    }

    FsServerDefaults ssDef = getServerDefaults();

    if (blockSize == -1) {
      blockSize = ssDef.getBlockSize();
    }
    if (bufferSize == -1) {
      bufferSize = ssDef.getFileBufferSize();
    }
    if (replication == -1) {
      replication = ssDef.getReplication();
    }
    if (permission == null) {
      permission = FsPermission.getDefault();
    }
    if (createParent == null) {
      createParent = false;
    }

    // Default impl assumes that permissions do not matter and
    // nor does the bytesPerChecksum hence
    // calling the regular create is good enough.
    // FSs that implement permissions should override this.

    if (!createParent) { // parent must exist.
      // since this.create makes parent dirs automatically
      // we must throw exception if parent does not exist.
      final FileStatus stat = getFileStatus(f.getParent());
      if (stat == null) {
        throw new FileNotFoundException("Missing parent:" + f);
      }
      if (!stat.isDir()) {
        throw new ParentNotDirectoryException("parent is not a dir:" + f);
      }
      // parent does exist - go ahead with create of file.
    }
    return this.create(f, permission, createFlag, bufferSize, replication,
        blockSize, progress);
  }

  /**
   * This version of the mkdirs method assumes that the permission.
   * It has been added to support the FileContext that processes the the permission
   * This version of the mkdirs method assumes that the permission is absolute.
   * It has been added to support the FileContext that processes the permission
   * with umask before calling this method.
   * This is a temporary method added to support the transition from FileSystem
   * to FileContext for user applications.
@@ -630,6 +735,39 @@ public abstract class FileSystem extends Configured implements Closeable {
  }

  /**
   * This version of the mkdirs method assumes that the permission is absolute.
   * It has been added to support the FileContext that processes the permission
   * with umask before calling this method.
   * This is a temporary method added to support the transition from FileSystem
   * to FileContext for user applications.
   */
  @Deprecated
  protected void primitiveMkdir(Path f, FsPermission absolutePermission,
      boolean createParent)
    throws IOException {

    if (!createParent) { // parent must exist.
      // since this.mkdirs makes parent dirs automatically
      // we must throw exception if parent does not exist.
      final FileStatus stat = getFileStatus(f.getParent());
      if (stat == null) {
        throw new FileNotFoundException("Missing parent:" + f);
      }
      if (!stat.isDir()) {
        throw new ParentNotDirectoryException("parent is not a dir");
      }
      // parent does exist - go ahead with mkdir of leaf
    }
    // Default impl is to assume that permissions do not matter and hence
    // calling the regular mkdirs is good enough.
    // FSs that implement permissions should override this.
    if (!this.mkdirs(f, absolutePermission)) {
      throw new IOException("mkdir of "+ f + " failed");
    }
  }

  /**
   * Creates the given Path as a brand-new zero-length file. If
   * create fails, or if it already existed, return false.

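A caller-side sketch of the non-recursive failure contract implemented above: when createParent is false and the parent is missing or is a regular file, the default implementations raise FileNotFoundException or ParentNotDirectoryException. The path below is illustrative, and a concrete file system may report a missing parent through a different IOException subclass.

import java.io.FileNotFoundException;
import java.io.IOException;

import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.ParentNotDirectoryException;
import org.apache.hadoop.fs.Path;

public class NonRecursiveMkdirSketch {
  public static void main(String[] args) throws IOException {
    FileContext fc = FileContext.getLocalFSFileContext();
    Path leaf = new Path("/tmp/noSuchParent/leaf");  // parent does not exist
    try {
      fc.mkdir(leaf, FileContext.DEFAULT_PERM, false);  // createParent == false
    } catch (FileNotFoundException e) {
      // Missing parent directory.
      System.out.println("parent missing: " + e.getMessage());
    } catch (ParentNotDirectoryException e) {
      // Parent exists but is a regular file.
      System.out.println("parent is not a directory: " + e.getMessage());
    }
  }
}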
@@ -23,7 +23,7 @@ import org.apache.hadoop.util.Progressable;
/**
 * This class contains options related to file system operations.
 */
public class Options {
public final class Options {
  /**
   * Class to support the varargs for create() options.
   *
@@ -45,6 +45,12 @@ public class Options {
    public static Perms perms(FsPermission perm) {
      return new Perms(perm);
    }
    public static CreateParent createParent() {
      return new CreateParent(true);
    }
    public static CreateParent donotCreateParent() {
      return new CreateParent(false);
    }

    static class BlockSize extends CreateOpts {
      private final long blockSize;
@@ -115,6 +121,13 @@ public class Options {
      }
      Progressable getValue() { return progress; }
    }

    static class CreateParent extends CreateOpts {
      private final Boolean createParent;
      protected CreateParent(boolean createPar) {
        createParent = createPar;}
      Boolean getValue() { return createParent; }
    }
  }

  /**

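Putting the CreateOpts factory methods together, a sketch of a create() call that combines several options. Each option kind may appear at most once; a duplicate triggers the "multiple varargs of same kind" IllegalArgumentException shown earlier. The path and numeric values here are illustrative assumptions.

import java.util.EnumSet;

import org.apache.hadoop.fs.CreateFlag;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.Options.CreateOpts;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;

public class CreateOptsSketch {
  public static void main(String[] args) throws Exception {
    FileContext fc = FileContext.getLocalFSFileContext();
    Path file = new Path("/tmp/createOptsSketch/part-0");  // illustrative path

    FSDataOutputStream out = fc.create(file,
        EnumSet.of(CreateFlag.CREATE),
        CreateOpts.blockSize(4096),          // must be a multiple of bytes-per-checksum
        CreateOpts.repFac((short) 1),
        CreateOpts.perms(new FsPermission((short) 0644)),
        CreateOpts.createParent());          // create the parent dir if it is missing
    out.writeBytes("example");
    out.close();
  }
}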
@@ -0,0 +1,189 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.fs;

import java.io.IOException;
import java.util.EnumSet;

import org.apache.hadoop.fs.Options.CreateOpts;
import org.apache.hadoop.util.StringUtils;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;

/**
 * <p>
 * A collection of tests for the {@link FileContext} create method.
 * This test should be used for testing an instance of FileContext
 * that has been initialized to a specific default FileSystem such as
 * LocalFileSystem, HDFS, S3, etc.
 * </p>
 * <p>
 * To test a given {@link FileSystem} implementation create a subclass of this
 * test and override {@link #setUp()} to initialize the <code>fc</code>
 * {@link FileContext} instance variable.
 *
 * Since this is a JUnit 4 test you can also do a single setup before
 * the start of any tests.
 * E.g.
 *     @BeforeClass public static void clusterSetupAtBegining()
 *     @AfterClass public static void ClusterShutdownAtEnd()
 * </p>
 */
public class FileContextCreateMkdirBaseTest {

  protected static FileContext fc;
  static final String TEST_ROOT_DIR = new Path(System.getProperty(
      "test.build.data", "/tmp")).toString().replace(' ', '_')
      + "/test";

  protected Path getTestRootRelativePath(String pathString) {
    return fc.makeQualified(new Path(TEST_ROOT_DIR, pathString));
  }

  private Path rootPath = null;
  protected Path getTestRoot() {
    if (rootPath == null) {
      rootPath = fc.makeQualified(new Path(TEST_ROOT_DIR));
    }
    return rootPath;
  }

  {
    try {
      ((org.apache.commons.logging.impl.Log4JLogger)FileSystem.LOG).getLogger()
          .setLevel(org.apache.log4j.Level.DEBUG);
    }
    catch(Exception e) {
      System.out.println("Cannot change log level\n"
          + StringUtils.stringifyException(e));
    }
  }

  @Before
  public void setUp() throws Exception {
    fc.mkdir(getTestRoot(), FileContext.DEFAULT_PERM, true);
  }

  @After
  public void tearDown() throws Exception {
    fc.delete(getTestRoot(), true);
  }

  ///////////////////////
  // Test Mkdir
  ////////////////////////

  @Test
  public void testMkdirNonRecursiveWithExistingDir() throws IOException {
    Path f = getTestRootRelativePath("aDir");
    fc.mkdir(f, FileContext.DEFAULT_PERM, false);
    Assert.assertTrue(fc.isDirectory(f));
  }

  @Test
  public void testMkdirNonRecursiveWithNonExistingDir() {
    try {
      fc.mkdir(getTestRootRelativePath("NonExistant/aDir"),
          FileContext.DEFAULT_PERM, false);
      Assert.fail("Mkdir with non existing parent dir should have failed");
    } catch (IOException e) {
      // failed as expected
    }
  }

  @Test
  public void testMkdirRecursiveWithExistingDir() throws IOException {
    Path f = getTestRootRelativePath("aDir");
    fc.mkdir(f, FileContext.DEFAULT_PERM, true);
    Assert.assertTrue(fc.isDirectory(f));
  }

  @Test
  public void testMkdirRecursiveWithNonExistingDir() throws IOException {
    Path f = getTestRootRelativePath("NonExistant2/aDir");
    fc.mkdir(f, FileContext.DEFAULT_PERM, true);
    Assert.assertTrue(fc.isDirectory(f));
  }

  ///////////////////////
  // Test Create
  ////////////////////////
  @Test
  public void testCreateNonRecursiveWithExistingDir() throws IOException {
    Path f = getTestRootRelativePath("foo");
    createFile(f);
    Assert.assertTrue(fc.isFile(f));
  }

  @Test
  public void testCreateNonRecursiveWithNonExistingDir() {
    try {
      createFile(getTestRootRelativePath("NonExisting/foo"));
      Assert.fail("Create with non existing parent dir should have failed");
    } catch (IOException e) {
      // As expected
    }
  }

  @Test
  public void testCreateRecursiveWithExistingDir() throws IOException {
    Path f = getTestRootRelativePath("foo");
    createFile(f, CreateOpts.createParent());
    Assert.assertTrue(fc.isFile(f));
  }

  @Test
  public void testCreateRecursiveWithNonExistingDir() throws IOException {
    Path f = getTestRootRelativePath("NonExisting/foo");
    createFile(f, CreateOpts.createParent());
    Assert.assertTrue(fc.isFile(f));
  }

  protected static int getBlockSize() {
    return 1024;
  }

  private static byte[] data = new byte[getBlockSize() * 2]; // two blocks of data
  {
    for (int i = 0; i < data.length; i++) {
      data[i] = (byte) (i % 10);
    }
  }

  protected void createFile(Path path,
      CreateOpts.CreateParent ... opt) throws IOException {

    FSDataOutputStream out = fc.create(path, EnumSet.of(CreateFlag.CREATE), opt);
    out.write(data, 0, data.length);
    out.close();
  }
}

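By way of illustration, a subclass for another file system would follow the same pattern as the local-FS test added at the end of this commit; the class name and the FileContext factory call in this sketch are assumptions, not part of the commit.

package org.apache.hadoop.fs;

import org.junit.Before;

// Hypothetical example: run the same create/mkdir tests against the
// default file system of the current configuration.
public class TestDefaultFSFileContextCreateMkdir extends
    FileContextCreateMkdirBaseTest {

  @Before
  public void setUp() throws Exception {
    // Assumption: FileContext.getFileContext() returns a FileContext bound
    // to the configured default file system.
    fc = FileContext.getFileContext();
    super.setUp();
  }
}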
@@ -70,7 +70,7 @@ public abstract class FileContextMainOperationsBaseTest {

  @Before
  public void setUp() throws Exception {
    fc.mkdirs(getTestRootPath("test"), FileContext.DEFAULT_PERM);
    fc.mkdir(getTestRootPath("test"), FileContext.DEFAULT_PERM, true);
  }

  @After
@@ -121,13 +121,13 @@ public abstract class FileContextMainOperationsBaseTest {
    // cd using a relative path
    Path relativeDir = new Path("existingDir1");
    Path absoluteDir = new Path(workDir.getParent(),"existingDir1");
    fc.mkdirs(absoluteDir, FileContext.DEFAULT_PERM);
    fc.mkdir(absoluteDir, FileContext.DEFAULT_PERM, true);
    fc.setWorkingDirectory(relativeDir);
    Assert.assertEquals(absoluteDir,
        fc.getWorkingDirectory());
    // cd using an absolute path
    absoluteDir = getTestRootPath("test/existingDir2");
    fc.mkdirs(absoluteDir, FileContext.DEFAULT_PERM);
    fc.mkdir(absoluteDir, FileContext.DEFAULT_PERM, true);
    fc.setWorkingDirectory(absoluteDir);
    Assert.assertEquals(absoluteDir, fc.getWorkingDirectory());

@@ -146,7 +146,7 @@ public abstract class FileContextMainOperationsBaseTest {

    // Try a URI
    absoluteDir = new Path("file:///tmp/existingDir");
    fc.mkdirs(absoluteDir, FileContext.DEFAULT_PERM);
    fc.mkdir(absoluteDir, FileContext.DEFAULT_PERM, true);
    fc.setWorkingDirectory(absoluteDir);
    Assert.assertEquals(absoluteDir, fc.getWorkingDirectory());

@@ -158,12 +158,12 @@ public abstract class FileContextMainOperationsBaseTest {
    Assert.assertFalse(fc.exists(testDir));
    Assert.assertFalse(fc.isFile(testDir));

    Assert.assertTrue(fc.mkdirs(testDir, FsPermission.getDefault()));
    fc.mkdir(testDir, FsPermission.getDefault(), true);

    Assert.assertTrue(fc.exists(testDir));
    Assert.assertFalse(fc.isFile(testDir));

    Assert.assertTrue(fc.mkdirs(testDir, FsPermission.getDefault()));
    fc.mkdir(testDir, FsPermission.getDefault(), true);

    Assert.assertTrue(fc.exists(testDir));
    Assert.assertFalse(fc.isFile(testDir));
@@ -182,14 +182,14 @@ public abstract class FileContextMainOperationsBaseTest {
  public void testMkdirsFailsForSubdirectoryOfExistingFile() throws Exception {
    Path testDir = getTestRootPath("test/hadoop");
    Assert.assertFalse(fc.exists(testDir));
    Assert.assertTrue(fc.mkdirs(testDir, FsPermission.getDefault()));
    fc.mkdir(testDir, FsPermission.getDefault(), true);
    Assert.assertTrue(fc.exists(testDir));

    createFile(getTestRootPath("test/hadoop/file"));

    Path testSubDir = getTestRootPath("test/hadoop/file/subdir");
    try {
      fc.mkdirs(testSubDir, FsPermission.getDefault());
      fc.mkdir(testSubDir, FsPermission.getDefault(), true);
      Assert.fail("Should throw IOException.");
    } catch (IOException e) {
      // expected
@@ -198,7 +198,7 @@ public abstract class FileContextMainOperationsBaseTest {

    Path testDeepSubDir = getTestRootPath("test/hadoop/file/deep/sub/dir");
    try {
      fc.mkdirs(testDeepSubDir, FsPermission.getDefault());
      fc.mkdir(testDeepSubDir, FsPermission.getDefault(), true);
      Assert.fail("Should throw IOException.");
    } catch (IOException e) {
      // expected
@@ -236,7 +236,7 @@ public abstract class FileContextMainOperationsBaseTest {
    Assert.assertFalse(fc.exists(testDirs[0]));

    for (Path path : testDirs) {
      Assert.assertTrue(fc.mkdirs(path, FsPermission.getDefault()));
      fc.mkdir(path, FsPermission.getDefault(), true);
    }

    FileStatus[] paths = fc.listStatus(getTestRootPath("test"));
@@ -290,7 +290,7 @@ public abstract class FileContextMainOperationsBaseTest {
  private void writeReadAndDelete(int len) throws IOException {
    Path path = getTestRootPath("test/hadoop/file");

    fc.mkdirs(path.getParent(), FsPermission.getDefault());
    fc.mkdir(path.getParent(), FsPermission.getDefault(), true);

    FSDataOutputStream out = fc.create(path, EnumSet.of(CreateFlag.CREATE),
        CreateOpts.repFac((short) 1), CreateOpts.blockSize(getBlockSize()));
@@ -320,7 +320,7 @@ public abstract class FileContextMainOperationsBaseTest {
  public void testOverwrite() throws IOException {
    Path path = getTestRootPath("test/hadoop/file");

    fc.mkdirs(path.getParent(), FsPermission.getDefault());
    fc.mkdir(path.getParent(), FsPermission.getDefault(), true);

    createFile(path);

@@ -368,7 +368,7 @@ public abstract class FileContextMainOperationsBaseTest {
    Path subdir = getTestRootPath("test/hadoop/subdir");

    createFile(file);
    Assert.assertTrue("Created subdir", fc.mkdirs(subdir, FsPermission.getDefault()));
    fc.mkdir(subdir, FsPermission.getDefault(), true);

    Assert.assertTrue("File exists", fc.exists(file));
    Assert.assertTrue("Dir exists", fc.exists(dir));
@@ -393,7 +393,7 @@ public abstract class FileContextMainOperationsBaseTest {
  @Test
  public void testDeleteEmptyDirectory() throws IOException {
    Path dir = getTestRootPath("test/hadoop");
    Assert.assertTrue(fc.mkdirs(dir, FsPermission.getDefault()));
    fc.mkdir(dir, FsPermission.getDefault(), true);
    Assert.assertTrue("Dir exists", fc.exists(dir));
    Assert.assertTrue("Deleted", fc.delete(dir, false));
    Assert.assertFalse("Dir doesn't exist", fc.exists(dir));
@@ -471,7 +471,7 @@ public abstract class FileContextMainOperationsBaseTest {
    Path src = getTestRootPath("test/hadoop/file");
    createFile(src);
    Path dst = getTestRootPath("test/new/newfile");
    fc.mkdirs(dst.getParent(), FileContext.DEFAULT_PERM);
    fc.mkdir(dst.getParent(), FileContext.DEFAULT_PERM, true);
    rename(src, dst, true, false, true, Rename.OVERWRITE);
  }

@@ -503,7 +503,7 @@ public abstract class FileContextMainOperationsBaseTest {
    Path src = getTestRootPath("test/hadoop/file");
    createFile(src);
    Path dst = getTestRootPath("test/new/existingDir");
    fc.mkdirs(dst, FileContext.DEFAULT_PERM);
    fc.mkdir(dst, FileContext.DEFAULT_PERM, true);

    // Fails without overwrite option
    try {
@@ -525,7 +525,7 @@ public abstract class FileContextMainOperationsBaseTest {
    if (!renameSupported()) return;

    Path src = getTestRootPath("test/hadoop/dir");
    fc.mkdirs(src, FileContext.DEFAULT_PERM);
    fc.mkdir(src, FileContext.DEFAULT_PERM, true);
    Path dst = getTestRootPath("test/nonExistent/newdir");

    try {
@@ -554,12 +554,12 @@ public abstract class FileContextMainOperationsBaseTest {
    if (!renameSupported()) return;

    Path src = getTestRootPath("test/hadoop/dir");
    fc.mkdirs(src, FileContext.DEFAULT_PERM);
    fc.mkdir(src, FileContext.DEFAULT_PERM, true);
    createFile(getTestRootPath("test/hadoop/dir/file1"));
    createFile(getTestRootPath("test/hadoop/dir/subdir/file2"));

    Path dst = getTestRootPath("test/new/newdir");
    fc.mkdirs(dst.getParent(), FileContext.DEFAULT_PERM);
    fc.mkdir(dst.getParent(), FileContext.DEFAULT_PERM, true);

    rename(src, dst, true, false, true, options);
    Assert.assertFalse("Nested file1 exists",
@@ -577,12 +577,12 @@ public abstract class FileContextMainOperationsBaseTest {
    if (!renameSupported()) return;

    Path src = getTestRootPath("test/hadoop/dir");
    fc.mkdirs(src, FileContext.DEFAULT_PERM);
    fc.mkdir(src, FileContext.DEFAULT_PERM, true);
    createFile(getTestRootPath("test/hadoop/dir/file1"));
    createFile(getTestRootPath("test/hadoop/dir/subdir/file2"));

    Path dst = getTestRootPath("test/new/newdir");
    fc.mkdirs(dst, FileContext.DEFAULT_PERM);
    fc.mkdir(dst, FileContext.DEFAULT_PERM, true);
    createFile(getTestRootPath("test/new/newdir/file1"));
    // Fails without overwrite option
    try {
@@ -605,7 +605,7 @@ public abstract class FileContextMainOperationsBaseTest {
    if (!renameSupported()) return;

    Path src = getTestRootPath("test/hadoop/dir");
    fc.mkdirs(src, FileContext.DEFAULT_PERM);
    fc.mkdir(src, FileContext.DEFAULT_PERM, true);
    Path dst = getTestRootPath("test/new/newfile");
    createFile(dst);
    // Fails without overwrite option
@@ -638,14 +638,17 @@ public abstract class FileContextMainOperationsBaseTest {
    //HADOOP-4760 according to Closeable#close() closing already-closed
    //streams should have no effect.
    Path src = getTestRootPath("test/hadoop/file");
    FSDataOutputStream out = fc.create(src, EnumSet.of(CreateFlag.CREATE));
    FSDataOutputStream out = fc.create(src, EnumSet.of(CreateFlag.CREATE),
        Options.CreateOpts.createParent());

    out.writeChar('H'); //write some data
    out.close();
    out.close();
  }

  protected void createFile(Path path) throws IOException {
    FSDataOutputStream out = fc.create(path, EnumSet.of(CreateFlag.CREATE));
    FSDataOutputStream out = fc.create(path, EnumSet.of(CreateFlag.CREATE),
        Options.CreateOpts.createParent());
    out.write(data, 0, data.length);
    out.close();
  }

@@ -23,6 +23,7 @@ import java.util.Set;

import junit.framework.Assert;

import org.apache.hadoop.fs.Options.CreateOpts;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
@@ -62,7 +63,8 @@ public class TestFileContextDeleteOnExit {
  }

  private void createFile(FileContext fc, Path path) throws IOException {
    FSDataOutputStream out = fc.create(path, EnumSet.of(CreateFlag.CREATE));
    FSDataOutputStream out = fc.create(path, EnumSet.of(CreateFlag.CREATE),
        CreateOpts.createParent());
    out.write(data, 0, data.length);
    out.close();
  }

@@ -0,0 +1,31 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.fs;

import org.junit.Before;

public class TestLocalFSFileContextCreateMkdir extends
    FileContextCreateMkdirBaseTest {

  @Before
  public void setUp() throws Exception {
    fc = FileContext.getLocalFSFileContext();
    super.setUp();
  }
}