HADOOP-15267. S3A multipart upload fails when SSE-C encryption is enabled.

Contributed by Anis Elleuch.
Steve Loughran 2018-03-07 18:50:27 +00:00
parent c0986b1b7e
commit 1dedc68f9d
3 changed files with 82 additions and 0 deletions
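
The bug in context: once a write is large enough for S3A to switch to a multipart upload, each UploadPartRequest went out without the SSE-C customer key, so S3 rejected the parts even though single-part PUTs worked. A minimal sketch of the failing scenario, assuming a bucket reachable over s3a:// with SSE-C enabled; the bucket name, sizes and key value are illustrative, not part of the patch:

// Sketch of the failing scenario before this patch; all names are placeholders.
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class SseCMultipartRepro {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    conf.set("fs.s3a.server-side-encryption-algorithm", "SSE-C");
    conf.set("fs.s3a.server-side-encryption.key",
        "4niV/jPK5VFRHY+KNb6wtqYd4xXyMgdJ9XQJpcQUVbs=");
    FileSystem fs = FileSystem.get(URI.create("s3a://example-bucket/"), conf);

    // Write more than fs.s3a.multipart.size bytes so the output stream
    // switches to multipart upload; before this fix the part uploads
    // failed because they carried no SSE-C key.
    byte[] block = new byte[8 * 1024 * 1024];
    try (FSDataOutputStream out = fs.create(new Path("/huge-file"))) {
      for (int i = 0; i < 32; i++) {
        out.write(block);
      }
    }
  }
}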

hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AFileSystem.java

@@ -1572,6 +1572,7 @@ public class S3AFileSystem extends FileSystem implements StreamCapabilities {
     long len = request.getPartSize();
     incrementPutStartStatistics(len);
     try {
+      setOptionalUploadPartRequestParameters(request);
       UploadPartResult uploadPartResult = s3.uploadPart(request);
       incrementPutCompletedStatistics(true, len);
       return uploadPartResult;
@@ -2563,6 +2564,23 @@ public class S3AFileSystem extends FileSystem implements StreamCapabilities {
     }
   }
 
+  /**
+   * Sets server-side encryption parameters on the part upload
+   * request when encryption is enabled.
+   * @param request upload part request
+   */
+  protected void setOptionalUploadPartRequestParameters(
+      UploadPartRequest request) {
+    switch (serverSideEncryptionAlgorithm) {
+    case SSE_C:
+      if (isNotBlank(getServerSideEncryptionKey(bucket, getConf()))) {
+        request.setSSECustomerKey(generateSSECustomerKey());
+      }
+      break;
+    default:
+    }
+  }
+
   /**
    * Initiate a multipart upload from the preconfigured request.
    * Retry policy: none + untranslated.

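At the SDK level, the contract being honored here is that with SSE-C every UploadPartRequest must present the same customer key as the InitiateMultipartUploadRequest; setOptionalUploadPartRequestParameters() is the hook that now attaches it. A standalone sketch against the AWS SDK v1 APIs S3A uses here; bucket, object name and part file are placeholders:

import java.io.File;
import java.util.Arrays;

import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.amazonaws.services.s3.model.CompleteMultipartUploadRequest;
import com.amazonaws.services.s3.model.InitiateMultipartUploadRequest;
import com.amazonaws.services.s3.model.InitiateMultipartUploadResult;
import com.amazonaws.services.s3.model.SSECustomerKey;
import com.amazonaws.services.s3.model.UploadPartRequest;
import com.amazonaws.services.s3.model.UploadPartResult;

public class SseCMultipartSketch {
  public static void main(String[] args) {
    AmazonS3 s3 = AmazonS3ClientBuilder.defaultClient();
    SSECustomerKey sseKey =
        new SSECustomerKey("4niV/jPK5VFRHY+KNb6wtqYd4xXyMgdJ9XQJpcQUVbs=");
    File part = new File("/tmp/part-0001");

    // The initiation request carries the customer key...
    InitiateMultipartUploadResult init = s3.initiateMultipartUpload(
        new InitiateMultipartUploadRequest("example-bucket", "huge-object")
            .withSSECustomerKey(sseKey));

    // ...and so must every part upload. Omitting the key here is exactly
    // the bug this patch fixes in S3AFileSystem.uploadPart().
    UploadPartResult uploaded = s3.uploadPart(new UploadPartRequest()
        .withBucketName("example-bucket")
        .withKey("huge-object")
        .withUploadId(init.getUploadId())
        .withPartNumber(1)
        .withFile(part)
        .withPartSize(part.length())
        .withSSECustomerKey(sseKey));

    s3.completeMultipartUpload(new CompleteMultipartUploadRequest(
        "example-bucket", "huge-object", init.getUploadId(),
        Arrays.asList(uploaded.getPartETag())));
  }
}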
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/MockS3AFileSystem.java

@@ -24,6 +24,7 @@ import java.net.URI;
 import com.amazonaws.AmazonClientException;
 import com.amazonaws.services.s3.AmazonS3;
 import com.amazonaws.services.s3.model.InitiateMultipartUploadRequest;
+import com.amazonaws.services.s3.model.UploadPartRequest;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -277,6 +278,11 @@ public class MockS3AFileSystem extends S3AFileSystem {
     // no-op
   }
 
+  @Override
+  protected void setOptionalUploadPartRequestParameters(
+      UploadPartRequest request) {
+  }
+
   @Override
   @SuppressWarnings("deprecation")
   public long getDefaultBlockSize() {

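The override is an empty no-op because MockS3AFileSystem is not wired to a real bucket, so there are no encryption settings for the hook to consult. If one wanted to unit-test the real behaviour, a hypothetical test in the org.apache.hadoop.fs.s3a package (where the protected hook is visible) might look like this; fs is assumed to be an S3AFileSystem initialized against an SSE-C configured bucket, and is not part of the patch:

@Test
public void testPartUploadCarriesSseCKey() throws Exception {
  // fs: hypothetical S3AFileSystem set up with SSE-C configuration.
  UploadPartRequest request = new UploadPartRequest();
  fs.setOptionalUploadPartRequestParameters(request);
  assertNotNull("part upload should carry the SSE-C key",
      request.getSSECustomerKey());
}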
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3AHugeFilesSSECDiskBlocks.java

@@ -0,0 +1,58 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.s3a.scale;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.s3a.Constants;
+import org.apache.hadoop.fs.s3a.S3AEncryptionMethods;
+import org.apache.hadoop.fs.s3a.S3ATestUtils;
+
+import static org.apache.hadoop.fs.s3a.S3ATestUtils.skipIfEncryptionTestsDisabled;
+
+/**
+ * Concrete class that extends {@link ITestS3AHugeFilesDiskBlocks}
+ * and tests huge file operations with SSE-C encryption enabled.
+ * Skipped if the SSE tests are disabled.
+ */
+public class ITestS3AHugeFilesSSECDiskBlocks
+    extends ITestS3AHugeFilesDiskBlocks {
+
+  private static final String KEY_1
+      = "4niV/jPK5VFRHY+KNb6wtqYd4xXyMgdJ9XQJpcQUVbs=";
+
+  @Override
+  public void setup() throws Exception {
+    super.setup();
+    skipIfEncryptionTestsDisabled(getConfiguration());
+  }
+
+  @Override
+  protected Configuration createScaleConfiguration() {
+    Configuration conf = super.createScaleConfiguration();
+    S3ATestUtils.disableFilesystemCaching(conf);
+    conf.set(Constants.SERVER_SIDE_ENCRYPTION_ALGORITHM,
+        getSSEAlgorithm().getMethod());
+    conf.set(Constants.SERVER_SIDE_ENCRYPTION_KEY, KEY_1);
+    return conf;
+  }
+
+  private S3AEncryptionMethods getSSEAlgorithm() {
+    return S3AEncryptionMethods.SSE_C;
+  }
+}
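
A likely way to run the new test, assuming the standard hadoop-aws integration-test setup of this era (a configured test bucket plus auth-keys.xml, with encryption tests left enabled): from hadoop-tools/hadoop-aws, run mvn verify -Dscale -Dit.test=ITestS3AHugeFilesSSECDiskBlocks -Dtest=none. As a huge-files scale test it writes files past the multipart threshold, so it exercises the SSE-C part-upload path end to end.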