HADOOP-12696. Add tests for S3FileSystem Contract. Contributed by Matt Paduano
(cherry picked from commit 1acc509b45)
(cherry picked from commit 2cbb8bbd72)
parent 5dc2e78c97
commit a359dc87d4
@@ -380,6 +380,8 @@ Release 2.8.0 - UNRELEASED
     HADOOP-12604. Exception may be swallowed in KMSClientProvider.
     (Yongjun Zhang)
 
+    HADOOP-12696. Add tests for S3Filesystem Contract (Matt Paduano via raviprak)
+
   OPTIMIZATIONS
 
     HADOOP-11785. Reduce the number of listStatus operation in distcp
@@ -190,7 +190,7 @@ tests against remote FileSystems that require login details require usernames/ID
 
 All these details MUST be required to be placed in the file `src/test/resources/contract-test-options.xml`, and your SCM tools configured to never commit this file to subversion, git or
 equivalent. Furthermore, the build MUST be configured to never bundle this file in any `-test` artifacts generated. The Hadoop build does this, excluding `src/test/**/*.xml` from the JAR files.
-
+In addition, `src/test/resources/auth-keys.xml` will need to be created. It can be a copy of `contract-test-options.xml`.
 The `AbstractFSContract` class automatically loads this resource file if present; specific keys for specific test cases can be added.
 
 As an example, here are what S3N test keys look like:
@@ -214,7 +214,7 @@ As an example, here are what S3N test keys look like:
 
 The `AbstractBondedFSContract` automatically skips a test suite if the FileSystem URL is not defined in the property `fs.contract.test.fs.%s`, where `%s` matches the schema name of the FileSystem.
 
-
+When running the tests `maven.test.skip` will need to be turned off since it is true by default on these tests. This can be done with a command like `mvn test -Ptests-on`.
 
 ### Important: passing the tests does not guarantee compatibility
 
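For orientation only (not part of this patch): a minimal sketch of what `contract-test-options.xml` might contain to bind these contract tests to the `s3` scheme. The bucket URL is a placeholder, and the credential property names are assumptions modelled on the S3N example the documentation refers to.

```xml
<configuration>
  <!-- points the contract tests at a (placeholder) bucket for the "s3" scheme -->
  <property>
    <name>fs.contract.test.fs.s3</name>
    <value>s3://test-bucket-name</value>
  </property>
  <!-- assumed credential keys, mirroring the S3N layout; never commit real values -->
  <property>
    <name>fs.s3.awsAccessKeyId</name>
    <value>DONOTCOMMIT-ACCESS-KEY</value>
  </property>
  <property>
    <name>fs.s3.awsSecretAccessKey</name>
    <value>DONOTCOMMIT-SECRET-KEY</value>
  </property>
</configuration>
```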
@@ -116,15 +116,13 @@ public abstract class AbstractContractSeekTest extends AbstractFSContractTestBas
    */
   @Test
   public void testSeekReadClosedFile() throws Throwable {
-    boolean supportsSeekOnClosedFiles = isSupported(SUPPORTS_SEEK_ON_CLOSED_FILE);
-
     instream = getFileSystem().open(smallSeekFile);
     getLog().debug(
       "Stream is of type " + instream.getClass().getCanonicalName());
     instream.close();
     try {
       instream.seek(0);
-      if (!supportsSeekOnClosedFiles) {
+      if (!isSupported(SUPPORTS_SEEK_ON_CLOSED_FILE)) {
         fail("seek succeeded on a closed stream");
       }
     } catch (IOException e) {
@@ -132,7 +130,9 @@ public abstract class AbstractContractSeekTest extends AbstractFSContractTestBas
     }
     try {
       int data = instream.available();
-      fail("read() succeeded on a closed stream, got " + data);
+      if (!isSupported(SUPPORTS_AVAILABLE_ON_CLOSED_FILE)) {
+        fail("available() succeeded on a closed stream, got " + data);
+      }
     } catch (IOException e) {
       //expected a closed file
     }
@@ -147,6 +147,12 @@ public interface ContractOptions {
    */
   String SUPPORTS_SEEK_ON_CLOSED_FILE = "supports-seek-on-closed-file";
 
+  /**
+   * Is available() on a closed InputStream supported?
+   * @{value}
+   */
+  String SUPPORTS_AVAILABLE_ON_CLOSED_FILE = "supports-available-on-closed-file";
+
   /**
    * Flag to indicate that this FS expects to throw the strictest
    * exceptions it can, not generic IOEs, which, if returned,
@@ -35,6 +35,7 @@ import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileAlreadyExistsException;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.ParentNotDirectoryException;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.fs.s3native.NativeS3FileSystem;
@@ -146,8 +147,16 @@ public class S3FileSystem extends FileSystem {
     } while (absolutePath != null);
 
     boolean result = true;
-    for (Path p : paths) {
-      result &= mkdir(p);
+    for (int i = 0; i < paths.size(); i++) {
+      Path p = paths.get(i);
+      try {
+        result &= mkdir(p);
+      } catch(FileAlreadyExistsException e) {
+        if (i + 1 < paths.size()) {
+          throw new ParentNotDirectoryException(e.getMessage());
+        }
+        throw e;
+      }
     }
     return result;
   }
@@ -158,7 +167,7 @@ public class S3FileSystem extends FileSystem {
     if (inode == null) {
       store.storeINode(absolutePath, INode.DIRECTORY_INODE);
     } else if (inode.isFile()) {
-      throw new IOException(String.format(
+      throw new FileAlreadyExistsException(String.format(
           "Can't make directory for path %s since it is a file.",
           absolutePath));
     }
@@ -176,11 +185,12 @@ public class S3FileSystem extends FileSystem {
 
   private INode checkFile(Path path) throws IOException {
     INode inode = store.retrieveINode(makeAbsolute(path));
+    String message = String.format("No such file: '%s'", path.toString());
     if (inode == null) {
-      throw new IOException("No such file.");
+      throw new FileNotFoundException(message + " does not exist");
     }
     if (inode.isDirectory()) {
-      throw new IOException("Path " + path + " is a directory.");
+      throw new FileNotFoundException(message + " is a directory");
     }
     return inode;
   }
@@ -222,10 +232,14 @@ public class S3FileSystem extends FileSystem {
 
     INode inode = store.retrieveINode(makeAbsolute(file));
     if (inode != null) {
-      if (overwrite) {
+      if (overwrite && !inode.isDirectory()) {
         delete(file, true);
       } else {
-        throw new FileAlreadyExistsException("File already exists: " + file);
+        String message = String.format("File already exists: '%s'", file);
+        if (inode.isDirectory()) {
+          message = message + " is a directory";
+        }
+        throw new FileAlreadyExistsException(message);
       }
     } else {
       Path parent = file.getParent();
@@ -22,6 +22,7 @@ import java.io.DataInputStream;
 import java.io.File;
 import java.io.FileInputStream;
 import java.io.IOException;
+import java.io.EOFException;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -86,8 +87,12 @@ class S3InputStream extends FSInputStream {
 
   @Override
   public synchronized void seek(long targetPos) throws IOException {
+    String message = String.format("Cannot seek to %d", targetPos);
     if (targetPos > fileLength) {
-      throw new IOException("Cannot seek after EOF");
+      throw new EOFException(message + ": after EOF");
+    }
+    if (targetPos < 0) {
+      throw new EOFException(message + ": negative");
     }
     pos = targetPos;
     blockEnd = -1;
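As an aside (not part of the patch): a minimal, hypothetical sketch of the behaviour the stricter seek() above gives callers, and which the contract seek tests can now assert. The class and parameter names here are illustrative only.

```java
import java.io.EOFException;
import java.io.IOException;

import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class SeekContractSketch {
  /**
   * Shows the stricter contract: negative and past-EOF seek targets now
   * surface as EOFException rather than a generic IOException.
   */
  public static void probeSeek(FileSystem fs, Path file, long fileLength)
      throws IOException {
    try (FSDataInputStream in = fs.open(file)) {
      try {
        in.seek(-1);                     // negative offset
      } catch (EOFException expected) {
        // "Cannot seek to -1: negative"
      }
      try {
        in.seek(fileLength + 1);         // beyond the end of the file
      } catch (EOFException expected) {
        // "Cannot seek to ...: after EOF" (rejects-seek-past-eof in contract/s3.xml)
      }
    }
  }
}
```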
@@ -0,0 +1,40 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.contract.s3;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.contract.AbstractBondedFSContract;
+
+public class S3Contract extends AbstractBondedFSContract {
+
+  public static final String CONTRACT_XML = "contract/s3.xml";
+
+
+  public S3Contract(Configuration conf) {
+    super(conf);
+    //insert the base features
+    addConfResource(CONTRACT_XML);
+  }
+
+  @Override
+  public String getScheme() {
+    return "s3";
+  }
+
+}
@@ -0,0 +1,32 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.contract.s3;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.contract.AbstractContractCreateTest;
+import org.apache.hadoop.fs.contract.AbstractFSContract;
+import org.apache.hadoop.fs.contract.ContractTestUtils;
+
+public class TestS3ContractCreate extends AbstractContractCreateTest {
+
+  @Override
+  protected AbstractFSContract createContract(Configuration conf) {
+    return new S3Contract(conf);
+  }
+}
@@ -0,0 +1,31 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.contract.s3;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.contract.AbstractContractDeleteTest;
+import org.apache.hadoop.fs.contract.AbstractFSContract;
+
+public class TestS3ContractDelete extends AbstractContractDeleteTest {
+
+  @Override
+  protected AbstractFSContract createContract(Configuration conf) {
+    return new S3Contract(conf);
+  }
+}
@@ -0,0 +1,32 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.contract.s3;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.contract.AbstractContractMkdirTest;
+import org.apache.hadoop.fs.contract.AbstractFSContract;
+import org.apache.hadoop.fs.contract.ContractTestUtils;
+
+public class TestS3ContractMkdir extends AbstractContractMkdirTest {
+
+  @Override
+  protected AbstractFSContract createContract(Configuration conf) {
+    return new S3Contract(conf);
+  }
+}
@@ -0,0 +1,32 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.contract.s3;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.contract.AbstractContractOpenTest;
+import org.apache.hadoop.fs.contract.AbstractFSContract;
+import org.apache.hadoop.fs.contract.ContractTestUtils;
+
+public class TestS3ContractOpen extends AbstractContractOpenTest {
+
+  @Override
+  protected AbstractFSContract createContract(Configuration conf) {
+    return new S3Contract(conf);
+  }
+}
@@ -0,0 +1,32 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.contract.s3;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.contract.AbstractContractRenameTest;
+import org.apache.hadoop.fs.contract.AbstractFSContract;
+
+public class TestS3ContractRename extends AbstractContractRenameTest {
+
+  @Override
+  protected AbstractFSContract createContract(Configuration conf) {
+    return new S3Contract(conf);
+  }
+
+}
@@ -0,0 +1,34 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.contract.s3;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.contract.AbstractContractRootDirectoryTest;
+import org.apache.hadoop.fs.contract.AbstractFSContract;
+
+/**
+ * root dir operations against an S3 bucket
+ */
+public class TestS3ContractRootDir extends AbstractContractRootDirectoryTest {
+
+  @Override
+  protected AbstractFSContract createContract(Configuration conf) {
+    return new S3Contract(conf);
+  }
+}
@@ -0,0 +1,32 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.contract.s3;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.contract.AbstractContractSeekTest;
+import org.apache.hadoop.fs.contract.AbstractFSContract;
+import org.apache.hadoop.fs.contract.ContractTestUtils;
+
+public class TestS3ContractSeek extends AbstractContractSeekTest {
+
+  @Override
+  protected AbstractFSContract createContract(Configuration conf) {
+    return new S3Contract(conf);
+  }
+}
@@ -0,0 +1,104 @@
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one
+  ~ or more contributor license agreements.  See the NOTICE file
+  ~ distributed with this work for additional information
+  ~ regarding copyright ownership.  The ASF licenses this file
+  ~ to you under the Apache License, Version 2.0 (the
+  ~ "License"); you may not use this file except in compliance
+  ~ with the License.  You may obtain a copy of the License at
+  ~
+  ~     http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing, software
+  ~ distributed under the License is distributed on an "AS IS" BASIS,
+  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  ~ See the License for the specific language governing permissions and
+  ~ limitations under the License.
+  -->
+
+<configuration>
+  <!--
+  S3 is backed by a blobstore.
+  -->
+
+  <property>
+    <name>fs.contract.test.root-tests-enabled</name>
+    <value>true</value>
+  </property>
+
+  <property>
+    <name>fs.contract.test.random-seek-count</name>
+    <value>10</value>
+  </property>
+
+  <property>
+    <name>fs.contract.is-blobstore</name>
+    <value>true</value>
+  </property>
+
+  <property>
+    <name>fs.contract.is-case-sensitive</name>
+    <value>true</value>
+  </property>
+
+  <property>
+    <name>fs.contract.rename-returns-false-if-source-missing</name>
+    <value>true</value>
+  </property>
+
+  <property>
+    <name>fs.contract.supports-append</name>
+    <value>false</value>
+  </property>
+
+  <property>
+    <name>fs.contract.supports-atomic-directory-delete</name>
+    <value>false</value>
+  </property>
+
+  <property>
+    <name>fs.contract.supports-atomic-rename</name>
+    <value>false</value>
+  </property>
+
+  <property>
+    <name>fs.contract.supports-block-locality</name>
+    <value>false</value>
+  </property>
+
+  <property>
+    <name>fs.contract.supports-concat</name>
+    <value>false</value>
+  </property>
+
+  <property>
+    <name>fs.contract.supports-seek</name>
+    <value>true</value>
+  </property>
+
+  <property>
+    <name>fs.contract.supports-seek-on-closed-file</name>
+    <value>true</value>
+  </property>
+
+  <property>
+    <name>fs.contract.supports-available-on-closed-file</name>
+    <value>true</value>
+  </property>
+
+  <property>
+    <name>fs.contract.rejects-seek-past-eof</name>
+    <value>true</value>
+  </property>
+
+  <property>
+    <name>fs.contract.supports-strict-exceptions</name>
+    <value>true</value>
+  </property>
+
+  <property>
+    <name>fs.contract.supports-unix-permissions</name>
+    <value>false</value>
+  </property>
+
+</configuration>