NIFI-1542: Update how PutS3Object handles a 403 AccessDenied response to an S3 ListMultipartUploads request.

* Change the log message from error to warning and add a note about S3 permissions (see the detection sketch below).
* Advance the ageoff check time by the check interval, as if the request had succeeded, to prevent re-checking on every upload when the permission does not exist (see the interval sketch after the diff).
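
For reference, a minimal sketch of the detection pattern the diff below introduces, written against the AWS SDK for Java 1.x. The class name, the AmazonS3ClientBuilder-based client, and the "example-bucket" name are placeholders for illustration only; PutS3Object builds its client and bucket from processor properties.

import com.amazonaws.AmazonClientException;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.amazonaws.services.s3.model.AmazonS3Exception;
import com.amazonaws.services.s3.model.ListMultipartUploadsRequest;
import com.amazonaws.services.s3.model.MultipartUploadListing;

public class ListMultipartUploadsPermissionCheck {

    public static void main(String[] args) {
        // Placeholder client and bucket name (hypothetical); not the processor's configuration.
        final AmazonS3 s3 = AmazonS3ClientBuilder.defaultClient();
        final String bucket = "example-bucket";

        try {
            final MultipartUploadListing listing =
                    s3.listMultipartUploads(new ListMultipartUploadsRequest(bucket));
            System.out.println("In-progress multipart uploads: " + listing.getMultipartUploads().size());
        } catch (AmazonClientException e) {
            if (e instanceof AmazonS3Exception
                    && ((AmazonS3Exception) e).getStatusCode() == 403
                    && "AccessDenied".equals(((AmazonS3Exception) e).getErrorCode())) {
                // Missing s3:ListBucketMultipartUploads: warn and defer the next check
                // instead of treating the condition as a hard error.
                System.out.println("AccessDenied listing multipart uploads for " + bucket
                        + "; the caller lacks s3:ListBucketMultipartUploads.");
            } else {
                System.err.println("Error listing multipart uploads for " + bucket + ": " + e.getMessage());
            }
        }
    }
}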

Reviewed by Tony Kurc (tkurc@apache.org) (with minor amendments for whitespace and error message consistency). This closes #240
Joe Skora 2016-02-21 16:05:12 -05:00 committed by Tony Kurc
parent 9576f41603
commit 4c5263220c
1 changed file with 14 additions and 2 deletions

@@ -39,6 +39,7 @@ import java.util.concurrent.atomic.AtomicLong;
 import java.util.concurrent.locks.Lock;
 import java.util.concurrent.locks.ReentrantLock;
 
+import com.amazonaws.services.s3.model.AmazonS3Exception;
 import org.apache.nifi.annotation.behavior.DynamicProperty;
 import org.apache.nifi.annotation.behavior.InputRequirement;
 import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
@@ -695,8 +696,19 @@ public class PutS3Object extends AbstractS3Processor {
                 ageoffLocalState(ageCutoff);
                 lastS3AgeOff.set(System.currentTimeMillis());
             } catch(AmazonClientException e) {
-                getLogger().error("Error checking S3 Multipart Upload list for {}: {}",
-                        new Object[]{bucket, e.getMessage()});
+                if (e instanceof AmazonS3Exception
+                        && ((AmazonS3Exception)e).getStatusCode() == 403
+                        && ((AmazonS3Exception) e).getErrorCode().equals("AccessDenied")) {
+                    getLogger().warn("AccessDenied checking S3 Multipart Upload list for {}: {} " +
+                            "** The configured user does not have the s3:ListBucketMultipartUploads permission " +
+                            "for this bucket, S3 ageoff cannot occur without this permission. Next ageoff check " +
+                            "time is being advanced by interval to prevent checking on every upload **",
+                            new Object[]{bucket, e.getMessage()});
+                    lastS3AgeOff.set(System.currentTimeMillis());
+                } else {
+                    getLogger().error("Error checking S3 Multipart Upload list for {}: {}",
+                            new Object[]{bucket, e.getMessage()});
+                }
             } finally {
                 s3BucketLock.unlock();
             }
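
As background for the ageoff bullet in the commit message, a small hypothetical sketch of why recording the current time after an AccessDenied response defers the next ListMultipartUploads attempt by one full interval. Only the lastS3AgeOff name mirrors the diff; the class, the interval field, and both methods are illustrative.

import java.util.concurrent.atomic.AtomicLong;

class AgeoffCheckTimer {

    // Timestamp of the last ageoff attempt, shared across uploads (mirrors lastS3AgeOff in the diff).
    private final AtomicLong lastS3AgeOff = new AtomicLong(0L);
    // Hypothetical configured interval between S3 ageoff checks, in milliseconds.
    private final long ageoffIntervalMillis;

    AgeoffCheckTimer(final long ageoffIntervalMillis) {
        this.ageoffIntervalMillis = ageoffIntervalMillis;
    }

    boolean shouldCheckNow() {
        // A new ListMultipartUploads call is attempted only after the interval has elapsed
        // since the last recorded check time.
        return System.currentTimeMillis() - lastS3AgeOff.get() >= ageoffIntervalMillis;
    }

    void recordCheck() {
        // Setting the timestamp to "now" after an AccessDenied response skips a full interval,
        // so the denied request is not retried on every upload.
        lastS3AgeOff.set(System.currentTimeMillis());
    }
}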