diff --git a/hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/testing.md b/hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/testing.md
index 1d1b9fdbe2c..5629dab21ff 100644
--- a/hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/testing.md
+++ b/hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/testing.md
@@ -168,6 +168,13 @@ You can also force all the tests to run with a specific SSE encryption method
by configuring the property `fs.s3a.server-side-encryption-algorithm` in the s3a
contract file.
+### Default Encryption
+
+Buckets can be configured with [default encryption](https://docs.aws.amazon.com/AmazonS3/latest/dev/bucket-encryption.html)
+on the AWS side. Some S3AFileSystem tests are skipped when default encryption is
+enabled due to unpredictability in how [ETags](https://docs.aws.amazon.com/AmazonS3/latest/API/RESTCommonResponseHeaders.html)
+are generated.
+
## Running the Tests
After completing the configuration, execute the test run through Maven.
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AMiscOperations.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AMiscOperations.java
index c323f112a06..d0d42b89f10 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AMiscOperations.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AMiscOperations.java
@@ -23,7 +23,10 @@ import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.URI;
import java.nio.charset.StandardCharsets;
+import java.nio.file.AccessDeniedException;
+import com.amazonaws.services.s3.AmazonS3;
+import com.amazonaws.services.s3.model.GetBucketEncryptionResult;
import com.amazonaws.services.s3.model.ObjectMetadata;
import com.amazonaws.services.s3.model.PutObjectRequest;
import org.junit.Assume;
@@ -44,6 +47,7 @@ import static org.apache.hadoop.fs.contract.ContractTestUtils.touch;
import static org.apache.hadoop.fs.s3a.Constants.SERVER_SIDE_ENCRYPTION_ALGORITHM;
import static org.apache.hadoop.fs.s3a.Constants.SERVER_SIDE_ENCRYPTION_KEY;
import static org.apache.hadoop.fs.s3a.S3ATestUtils.removeBaseAndBucketOverrides;
+import static org.hamcrest.Matchers.nullValue;
/**
* Tests of the S3A FileSystem which don't have a specific home and can share
@@ -151,10 +155,12 @@ public class ITestS3AMiscOperations extends AbstractS3ATestBase {
/**
* The assumption here is that 0-byte files uploaded in a single PUT
* always have the same checksum, including stores with encryption.
+ * This will be skipped if the bucket has S3 default encryption enabled.
* @throws Throwable on a failure
*/
@Test
public void testEmptyFileChecksums() throws Throwable {
+ assumeNoDefaultEncryption();
final S3AFileSystem fs = getFileSystem();
Path file1 = touchFile("file1");
EtagChecksum checksum1 = fs.getFileChecksum(file1, 0);
@@ -167,6 +173,20 @@ public class ITestS3AMiscOperations extends AbstractS3ATestBase {
fs.getFileChecksum(touchFile("file2"), 0));
}
+ /**
+ * Skip a test if we can get the default encryption on a bucket and it is
+ * non-null.
+ */
+ private void assumeNoDefaultEncryption() throws IOException {
+ try {
+ Assume.assumeThat(getDefaultEncryption(), nullValue());
+ } catch (AccessDeniedException e) {
+ // if the user can't check the default encryption, assume that it is
+ // null and keep going
+      LOG.warn("User does not have permission to call getBucketEncryption()", e);
+ }
+ }
+
/**
* Make sure that when checksums are disabled, the caller
* gets null back.
@@ -207,12 +227,13 @@ public class ITestS3AMiscOperations extends AbstractS3ATestBase {
/**
* Verify that on an unencrypted store, the checksum of two non-empty
* (single PUT) files is the same if the data is the same.
- * This will fail if the bucket has S3 default encryption enabled.
+ * This will be skipped if the bucket has S3 default encryption enabled.
* @throws Throwable failure
*/
@Test
public void testNonEmptyFileChecksumsUnencrypted() throws Throwable {
Assume.assumeTrue(encryptionAlgorithm().equals(S3AEncryptionMethods.NONE));
+ assumeNoDefaultEncryption();
final S3AFileSystem fs = getFileSystem();
final EtagChecksum checksum1 =
fs.getFileChecksum(mkFile("file5", HELLO), 0);
@@ -368,4 +389,21 @@
s.endsWith("/"));
return o;
}
+
+ /**
+ * Gets default encryption settings for the bucket or returns null if default
+ * encryption is disabled.
+ */
+ private GetBucketEncryptionResult getDefaultEncryption() throws IOException {
+ S3AFileSystem fs = getFileSystem();
+ AmazonS3 s3 = fs.getAmazonS3ClientForTesting("check default encryption");
+ try {
+ return Invoker.once("getBucketEncryption()",
+ fs.getBucket(),
+ () -> s3.getBucketEncryption(fs.getBucket()));
+ } catch (FileNotFoundException e) {
+ return null;
+ }
+ }
+
}