HADOOP-9361: Strictly define FileSystem APIs - OpenStack portion
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1607625 13f79535-47bb-0310-9956-ffa450edef68
commit e87785aea4 (parent d6fd16c8dc)
StrictBufferedFSInputStream.java

@@ -19,9 +19,11 @@
 package org.apache.hadoop.fs.swift.snative;
 
 import org.apache.hadoop.fs.BufferedFSInputStream;
+import org.apache.hadoop.fs.FSExceptionMessages;
 import org.apache.hadoop.fs.FSInputStream;
 import org.apache.hadoop.fs.swift.exceptions.SwiftConnectionClosedException;
 
+import java.io.EOFException;
 import java.io.IOException;
 
 /**

@@ -37,10 +39,10 @@ public class StrictBufferedFSInputStream extends BufferedFSInputStream {
   @Override
   public void seek(long pos) throws IOException {
     if (pos < 0) {
-      throw new IOException("Negative position");
+      throw new EOFException(FSExceptionMessages.NEGATIVE_SEEK);
     }
     if (in == null) {
-      throw new SwiftConnectionClosedException("Stream closed");
+      throw new SwiftConnectionClosedException(FSExceptionMessages.STREAM_IS_CLOSED);
     }
     super.seek(pos);
   }
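Note: the seek() fixes in this patch all converge on the shared constants in org.apache.hadoop.fs.FSExceptionMessages. A minimal sketch of that convention, written for illustration only (the class below is not part of the patch):

// Minimal sketch of the seek precondition this patch standardizes:
// a negative offset fails with EOFException(FSExceptionMessages.NEGATIVE_SEEK).
// Illustrative class, not part of the patch.
import org.apache.hadoop.fs.FSExceptionMessages;

import java.io.EOFException;
import java.io.IOException;

public class SeekPreconditionSketch {
  private long pos;

  public void seek(long targetPos) throws IOException {
    if (targetPos < 0) {
      // same constant the patched streams now use
      throw new EOFException(FSExceptionMessages.NEGATIVE_SEEK);
    }
    pos = targetPos;
  }

  public long getPos() {
    return pos;
  }
}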
SwiftNativeFileSystem.java

@@ -25,14 +25,14 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.BlockLocation;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileAlreadyExistsException;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.ParentNotDirectoryException;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.fs.swift.exceptions.SwiftConfigurationException;
-import org.apache.hadoop.fs.swift.exceptions.SwiftNotDirectoryException;
 import org.apache.hadoop.fs.swift.exceptions.SwiftOperationFailedException;
-import org.apache.hadoop.fs.swift.exceptions.SwiftPathExistsException;
 import org.apache.hadoop.fs.swift.exceptions.SwiftUnsupportedFeatureException;
 import org.apache.hadoop.fs.swift.http.SwiftProtocolConstants;
 import org.apache.hadoop.fs.swift.util.DurationStats;

@@ -373,7 +373,7 @@ public class SwiftNativeFileSystem extends FileSystem {
   * @param directory path to query
   * @return true iff the directory should be created
   * @throws IOException IO problems
-  * @throws SwiftNotDirectoryException if the path references a file
+  * @throws ParentNotDirectoryException if the path references a file
   */
  private boolean shouldCreate(Path directory) throws IOException {
    FileStatus fileStatus;

@@ -388,9 +388,9 @@ public class SwiftNativeFileSystem extends FileSystem {
 
    if (!SwiftUtils.isDirectory(fileStatus)) {
      //if it's a file, raise an error
-      throw new SwiftNotDirectoryException(directory,
-              String.format(": can't mkdir since it exists and is not a directory: %s",
-                      fileStatus));
+      throw new ParentNotDirectoryException(
+              String.format("%s: can't mkdir since it exists and is not a directory: %s",
+                      directory, fileStatus));
    } else {
      //path exists, and it is a directory
      if (LOG.isDebugEnabled()) {

@@ -488,7 +488,7 @@ public class SwiftNativeFileSystem extends FileSystem {
        //overwrite set -> delete the object.
        store.delete(absolutePath, true);
      } else {
-        throw new SwiftPathExistsException("Path exists: " + file);
+        throw new FileAlreadyExistsException("Path exists: " + file);
      }
    } else {
      // destination does not exist -trigger creation of the parent

@@ -580,6 +580,9 @@ public class SwiftNativeFileSystem extends FileSystem {
    } catch (SwiftOperationFailedException e) {
      //downgrade to a failure
      return false;
+    } catch (FileAlreadyExistsException e) {
+      //downgrade to a failure
+      return false;
    } catch (FileNotFoundException e) {
      //downgrade to a failure
      return false;
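Taken together, the SwiftNativeFileSystem changes replace Swift-specific exceptions with the standard Hadoop types, so callers can handle them generically. A hedged caller-side sketch; the swift:// URI and configuration are assumptions for illustration, not part of the patch:

// Illustrative caller-side handling of the standardized exceptions.
// The swift:// URI and the service binding are hypothetical; a real run needs
// Swift credentials configured.
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileAlreadyExistsException;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.ParentNotDirectoryException;
import org.apache.hadoop.fs.Path;

import java.io.IOException;
import java.net.URI;

public class SwiftExceptionHandlingSketch {
  public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(URI.create("swift://container.service/"), conf);

    Path file = new Path("/data/report.csv");
    try {
      // create() with overwrite=false over an existing object now surfaces
      // FileAlreadyExistsException instead of the removed SwiftPathExistsException
      fs.create(file, false).close();
    } catch (FileAlreadyExistsException e) {
      // handle "already there" explicitly
    }
    try {
      // mkdirs() under a path that is a file now raises ParentNotDirectoryException
      fs.mkdirs(new Path(file, "child"));
    } catch (ParentNotDirectoryException e) {
      // parent is a file, not a directory
    }
  }
}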
SwiftNativeFileSystemStore.java

@@ -22,6 +22,7 @@ import org.apache.commons.httpclient.HttpStatus;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileAlreadyExistsException;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.swift.exceptions.SwiftConfigurationException;

@@ -590,7 +591,7 @@ public class SwiftNativeFileSystemStore {
    } else {
      //outcome #1 dest it's a file: fail if differeent
      if (!renamingOnToSelf) {
-        throw new SwiftOperationFailedException(
+        throw new FileAlreadyExistsException(
            "cannot rename a file over one that already exists");
      } else {
        //is mv self self where self is a file. this becomes a no-op

@@ -633,7 +634,7 @@ public class SwiftNativeFileSystemStore {
 
    if (destExists && !destIsDir) {
      // #1 destination is a file: fail
-      throw new SwiftOperationFailedException(
+      throw new FileAlreadyExistsException(
          "the source is a directory, but not the destination");
    }
    Path targetPath;
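The FileAlreadyExistsException raised here by the store is caught in SwiftNativeFileSystem.rename() (see the @@ -580,6 +580,9 @@ hunk above) and downgraded to a false return, per the FileSystem rename contract. An illustrative caller sketch, not from the patch:

// Rename onto an existing file is now reported through the boolean contract of
// FileSystem.rename(); paths and the helper name are illustrative.
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.io.IOException;

public class RenameCheckSketch {
  static void moveOrFail(FileSystem fs, Path src, Path dest) throws IOException {
    // The FileAlreadyExistsException thrown by the store is downgraded inside
    // SwiftNativeFileSystem.rename(), so callers observe a false return value.
    if (!fs.rename(src, dest)) {
      throw new IOException("Could not rename " + src + " to " + dest);
    }
  }
}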
SwiftNativeInputStream.java

@@ -20,6 +20,7 @@ package org.apache.hadoop.fs.swift.snative;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.fs.FSExceptionMessages;
 import org.apache.hadoop.fs.FSInputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;

@@ -298,7 +299,8 @@ class SwiftNativeInputStream extends FSInputStream {
  @Override
  public synchronized void seek(long targetPos) throws IOException {
    if (targetPos < 0) {
-      throw new IOException("Negative Seek offset not supported");
+      throw new EOFException(
+          FSExceptionMessages.NEGATIVE_SEEK);
    }
    //there's some special handling of near-local data
    //as the seek can be omitted if it is in/adjacent
SwiftNativeOutputStream.java

@@ -22,6 +22,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.swift.exceptions.SwiftConnectionClosedException;
 import org.apache.hadoop.fs.swift.exceptions.SwiftException;
 import org.apache.hadoop.fs.swift.exceptions.SwiftInternalStateException;
 import org.apache.hadoop.fs.swift.util.SwiftUtils;

@@ -109,7 +110,7 @@ class SwiftNativeOutputStream extends OutputStream {
   */
  private synchronized void verifyOpen() throws SwiftException {
    if (closed) {
-      throw new SwiftException("Output stream is closed");
+      throw new SwiftConnectionClosedException();
    }
  }
 
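A simplified sketch of the fail-fast guard that verifyOpen() enforces after this change; this illustrates the pattern only and is not the SwiftNativeOutputStream implementation:

// Operations on a closed stream fail fast with a specific "connection closed"
// style exception (the patch uses SwiftConnectionClosedException).
import java.io.IOException;
import java.io.OutputStream;

class GuardedOutputStreamSketch extends OutputStream {
  private boolean closed;

  private synchronized void verifyOpen() throws IOException {
    if (closed) {
      throw new IOException("Stream is closed!");
    }
  }

  @Override
  public synchronized void write(int b) throws IOException {
    verifyOpen();          // reject writes after close()
    // ... buffer the byte for the eventual PUT ...
  }

  @Override
  public synchronized void close() throws IOException {
    closed = true;         // subsequent writes now fail fast
  }
}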
TestSwiftFileSystemBasicOps.java

@@ -21,9 +21,9 @@ package org.apache.hadoop.fs.swift;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.ParentNotDirectoryException;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.swift.exceptions.SwiftBadRequestException;
-import org.apache.hadoop.fs.swift.exceptions.SwiftNotDirectoryException;
 import org.apache.hadoop.fs.swift.snative.SwiftNativeFileSystem;
 import org.apache.hadoop.fs.swift.util.SwiftTestUtils;
 import org.junit.Test;

@@ -245,7 +245,7 @@ public class TestSwiftFileSystemBasicOps extends SwiftFileSystemBaseTest {
      writeTextFile(fs, path, "parent", true);
      try {
        fs.mkdirs(child);
-      } catch (SwiftNotDirectoryException expected) {
+      } catch (ParentNotDirectoryException expected) {
        LOG.debug("Expected Exception", expected);
      }
    } finally {
TestSwiftFileSystemContract.java

@@ -23,8 +23,8 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystemContractBaseTest;
+import org.apache.hadoop.fs.ParentNotDirectoryException;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.swift.exceptions.SwiftNotDirectoryException;
 import org.apache.hadoop.fs.swift.snative.SwiftNativeFileSystem;
 import org.apache.hadoop.fs.swift.util.SwiftTestUtils;
 

@@ -47,6 +47,14 @@ public class TestSwiftFileSystemContract
  private static final Log LOG =
      LogFactory.getLog(TestSwiftFileSystemContract.class);
 
+  /**
+   * Override this if the filesystem is not case sensitive
+   * @return true if the case detection/preservation tests should run
+   */
+  protected boolean filesystemIsCaseSensitive() {
+    return false;
+  }
+
  @Override
  protected void setUp() throws Exception {
    final URI uri = getFilesystemURI();

@@ -89,9 +97,8 @@ public class TestSwiftFileSystemContract
    try {
      fs.mkdirs(testSubDir);
      fail("Should throw IOException.");
-    } catch (SwiftNotDirectoryException e) {
+    } catch (ParentNotDirectoryException e) {
      // expected
-      assertEquals(filepath,e.getPath());
    }
    //now verify that the subdir path does not exist
    SwiftTestUtils.assertPathDoesNotExist(fs, "subdir after mkdir", testSubDir);

@@ -100,7 +107,7 @@ public class TestSwiftFileSystemContract
    try {
      fs.mkdirs(testDeepSubDir);
      fail("Should throw IOException.");
-    } catch (SwiftNotDirectoryException e) {
+    } catch (ParentNotDirectoryException e) {
      // expected
    }
    SwiftTestUtils.assertPathDoesNotExist(fs, "testDeepSubDir after mkdir",
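The new filesystemIsCaseSensitive() hook lets the shared contract test skip case-sensitivity checks for Swift. A rough, illustrative probe of the kind such a hook gates; names and logic here are assumptions, not taken from FileSystemContractBaseTest:

// Illustrative only: a case-sensitivity probe a contract test could skip when
// filesystemIsCaseSensitive() returns false, as it now does for Swift.
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.io.IOException;

public class CaseSensitivityProbeSketch {
  static boolean distinguishesCase(FileSystem fs, Path dir) throws IOException {
    Path lower = new Path(dir, "file");
    Path upper = new Path(dir, "FILE");
    fs.create(lower, true).close();
    // a case-sensitive store must not report the upper-case twin as existing
    return !fs.exists(upper);
  }
}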
TestSwiftFileSystemRename.java

@@ -21,6 +21,7 @@ package org.apache.hadoop.fs.swift;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.swift.exceptions.SwiftOperationFailedException;
 import org.apache.hadoop.fs.swift.util.SwiftTestUtils;
 import org.junit.Test;
 

@@ -220,7 +221,11 @@ public class TestSwiftFileSystemRename extends SwiftFileSystemBaseTest {
    fs.mkdirs(testdir);
    Path parent = testdir.getParent();
    //the outcome here is ambiguous, so is not checked
-    fs.rename(testdir, parent);
+    try {
+      fs.rename(testdir, parent);
+    } catch (SwiftOperationFailedException e) {
+      // allowed
+    }
    assertExists("Source directory has been deleted ", testdir);
  }
 
SwiftContract.java (new file)

@@ -0,0 +1,44 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.swift.contract;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.contract.AbstractBondedFSContract;
+import org.apache.hadoop.fs.swift.snative.SwiftNativeFileSystem;
+
+/**
+ * The contract of OpenStack Swift: only enabled if the test binding data is provided
+ */
+public class SwiftContract extends AbstractBondedFSContract {
+
+  public static final String CONTRACT_XML = "contract/swift.xml";
+
+  public SwiftContract(Configuration conf) {
+    super(conf);
+    //insert the base features
+    addConfResource(CONTRACT_XML);
+  }
+
+
+  @Override
+  public String getScheme() {
+    return SwiftNativeFileSystem.SWIFT;
+  }
+
+}
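The contract is "bonded": the tests below only run when a Swift test filesystem is bound in the test configuration. The property name in this sketch is recalled from the filesystem contract-test convention and is not stated in this patch, so treat it as an assumption:

// Assumption: the contract framework looks up a per-scheme binding of the form
// fs.contract.test.fs.<scheme>; the URI below is purely illustrative, and a real
// run also needs the usual Swift service credentials configured.
import org.apache.hadoop.conf.Configuration;

public class SwiftContractBindingSketch {
  static Configuration withTestBinding() {
    Configuration conf = new Configuration();
    conf.set("fs.contract.test.fs.swift", "swift://testcontainer.myprovider/");
    return conf;
  }
}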
TestSwiftContractCreate.java (new file)

@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.swift.contract;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.contract.AbstractContractCreateTest;
+import org.apache.hadoop.fs.contract.AbstractFSContract;
+import org.apache.hadoop.fs.contract.ContractTestUtils;
+
+public class TestSwiftContractCreate extends AbstractContractCreateTest {
+
+  @Override
+  protected AbstractFSContract createContract(Configuration conf) {
+    return new SwiftContract(conf);
+  }
+
+  @Override
+  public void testOverwriteEmptyDirectory() throws Throwable {
+    ContractTestUtils.skip("blobstores can't distinguish empty directories from files");
+  }
+}
TestSwiftContractDelete.java (new file)

@@ -0,0 +1,31 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.swift.contract;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.contract.AbstractContractDeleteTest;
+import org.apache.hadoop.fs.contract.AbstractFSContract;
+
+public class TestSwiftContractDelete extends AbstractContractDeleteTest {
+
+  @Override
+  protected AbstractFSContract createContract(Configuration conf) {
+    return new SwiftContract(conf);
+  }
+}
SwiftNotDirectoryException.java renamed to TestSwiftContractMkdir.java

@@ -16,28 +16,19 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.fs.swift.exceptions;
+package org.apache.hadoop.fs.swift.contract;
 
-import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.contract.AbstractContractMkdirTest;
+import org.apache.hadoop.fs.contract.AbstractFSContract;
 
 /**
- * Exception raised when an operation is meant to work on a directory, but
- * the target path is not a directory
+ * Test dir operations on S3
  */
-public class SwiftNotDirectoryException extends SwiftException {
-  private final Path path;
-
-  public SwiftNotDirectoryException(Path path) {
-    this(path, "");
-  }
-
-  public SwiftNotDirectoryException(Path path,
-                                    String message) {
-    super(path.toString() + message);
-    this.path = path;
-  }
-
-  public Path getPath() {
-    return path;
+public class TestSwiftContractMkdir extends AbstractContractMkdirTest {
+
+  @Override
+  protected AbstractFSContract createContract(Configuration conf) {
+    return new SwiftContract(conf);
   }
 }
TestSwiftContractOpen.java (new file)

@@ -0,0 +1,42 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.swift.contract;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.contract.AbstractContractOpenTest;
+import org.apache.hadoop.fs.contract.AbstractFSContract;
+import org.apache.hadoop.fs.contract.ContractTestUtils;
+
+public class TestSwiftContractOpen extends AbstractContractOpenTest {
+
+  @Override
+  protected AbstractFSContract createContract(Configuration conf) {
+    return new SwiftContract(conf);
+  }
+
+  @Override
+  public void testOpenReadDir() throws Throwable {
+    ContractTestUtils.skip("Skipping object-store quirk");
+  }
+
+  @Override
+  public void testOpenReadDirWithChild() throws Throwable {
+    ContractTestUtils.skip("Skipping object-store quirk");
+  }
+}
TestSwiftContractRename.java (new file)

@@ -0,0 +1,32 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.swift.contract;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.contract.AbstractContractRenameTest;
+import org.apache.hadoop.fs.contract.AbstractFSContract;
+
+public class TestSwiftContractRename extends AbstractContractRenameTest {
+
+  @Override
+  protected AbstractFSContract createContract(Configuration conf) {
+    return new SwiftContract(conf);
+  }
+
+}
TestSwiftContractRootDir.java (new file)

@@ -0,0 +1,35 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.swift.contract;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.contract.AbstractContractRootDirectoryTest;
+import org.apache.hadoop.fs.contract.AbstractFSContract;
+
+/**
+ * root dir operations against an S3 bucket
+ */
+public class TestSwiftContractRootDir extends
+    AbstractContractRootDirectoryTest {
+
+  @Override
+  protected AbstractFSContract createContract(Configuration conf) {
+    return new SwiftContract(conf);
+  }
+}
SwiftPathExistsException.java renamed to TestSwiftContractSeek.java

@@ -16,18 +16,16 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.fs.swift.exceptions;
+package org.apache.hadoop.fs.swift.contract;
 
-/**
- * Exception raised when trying to create a file that already exists
- * and the overwrite flag is set to false.
- */
-public class SwiftPathExistsException extends SwiftException {
-  public SwiftPathExistsException(String message) {
-    super(message);
-  }
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.contract.AbstractContractSeekTest;
+import org.apache.hadoop.fs.contract.AbstractFSContract;
 
-  public SwiftPathExistsException(String message, Throwable cause) {
-    super(message, cause);
+public class TestSwiftContractSeek extends AbstractContractSeekTest {
+
+  @Override
+  protected AbstractFSContract createContract(Configuration conf) {
+    return new SwiftContract(conf);
   }
 }
TestV2LsOperations.java

@@ -111,7 +111,7 @@ public class TestV2LsOperations extends SwiftFileSystemBaseTest {
  @Test(timeout = SWIFT_TEST_TIMEOUT)
  public void testListFilesSubDir() throws Throwable {
    createTestSubdirs();
-    Path dir = path("/test");
+    Path dir = path("/test/subdir");
    Path child = new Path(dir, "text.txt");
    SwiftTestUtils.writeTextFile(fs, child, "text", false);
    assertListFilesFinds(fs, dir, child, false);

@@ -120,7 +120,7 @@ public class TestV2LsOperations extends SwiftFileSystemBaseTest {
  @Test(timeout = SWIFT_TEST_TIMEOUT)
  public void testListFilesRecursive() throws Throwable {
    createTestSubdirs();
-    Path dir = path("/test");
+    Path dir = path("/test/recursive");
    Path child = new Path(dir, "hadoop/a/a.txt");
    SwiftTestUtils.writeTextFile(fs, child, "text", false);
    assertListFilesFinds(fs, dir, child, true);
contract/swift.xml (new file)

@@ -0,0 +1,95 @@
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one
+  ~ or more contributor license agreements. See the NOTICE file
+  ~ distributed with this work for additional information
+  ~ regarding copyright ownership. The ASF licenses this file
+  ~ to you under the Apache License, Version 2.0 (the
+  ~ "License"); you may not use this file except in compliance
+  ~ with the License. You may obtain a copy of the License at
+  ~
+  ~     http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing, software
+  ~ distributed under the License is distributed on an "AS IS" BASIS,
+  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  ~ See the License for the specific language governing permissions and
+  ~ limitations under the License.
+  -->
+
+<configuration>
+  <!--
+  Openstack Swift is a blobstore, with very different behavior than a
+  classic filesystem.
+  -->
+
+  <property>
+    <name>fs.contract.test.root-tests-enabled</name>
+    <value>true</value>
+  </property>
+
+  <property>
+    <name>fs.contract.test.random-seek-count</name>
+    <value>10</value>
+  </property>
+
+  <property>
+    <name>fs.contract.is-blobstore</name>
+    <value>true</value>
+  </property>
+
+  <property>
+    <name>fs.contract.is-case-sensitive</name>
+    <value>true</value>
+  </property>
+
+  <property>
+    <name>fs.contract.supports-append</name>
+    <value>false</value>
+  </property>
+
+  <property>
+    <name>fs.contract.supports-atomic-directory-delete</name>
+    <value>false</value>
+  </property>
+
+  <property>
+    <name>fs.contract.supports-atomic-rename</name>
+    <value>false</value>
+  </property>
+
+  <property>
+    <name>fs.contract.supports-block-locality</name>
+    <value>false</value>
+  </property>
+
+  <property>
+    <name>fs.contract.supports-concat</name>
+    <value>false</value>
+  </property>
+
+  <property>
+    <name>fs.contract.supports-seek</name>
+    <value>true</value>
+  </property>
+
+  <property>
+    <name>fs.contract.rejects-seek-past-eof</name>
+    <value>true</value>
+  </property>
+
+  <property>
+    <name>fs.contract.supports-strict-exceptions</name>
+    <value>true</value>
+  </property>
+
+  <property>
+    <name>fs.contract.supports-unix-permissions</name>
+    <value>false</value>
+  </property>
+
+  <property>
+    <name>fs.contract.rename-returns-false-if-source-missing</name>
+    <value>true</value>
+  </property>
+
+</configuration>
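The fs.contract.* keys above declare which behaviours the Swift connector promises, and the contract tests read them from the same resource that SwiftContract adds. A small sketch of loading and querying those options directly (illustrative; the contract framework normally does this for you):

// Sketch: load the contract resource the same way SwiftContract.addConfResource()
// does, then read a couple of the fs.contract.* switches declared above.
import org.apache.hadoop.conf.Configuration;

public class SwiftContractOptionsSketch {
  public static void main(String[] args) {
    Configuration conf = new Configuration(false);
    conf.addResource("contract/swift.xml");    // SwiftContract.CONTRACT_XML
    boolean blobstore = conf.getBoolean("fs.contract.is-blobstore", false);
    boolean atomicRename = conf.getBoolean("fs.contract.supports-atomic-rename", true);
    System.out.println("blobstore=" + blobstore + ", atomic rename=" + atomicRename);
  }
}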