Use ESBlobStoreRepositoryIntegTestCase to test the repository-s3 plugin (#29315)

This commit adds the S3BlobStoreRepositoryTests class that extends the
base testing class for S3. It also removes some usage of socket servers 
that emulate socket connections in unit tests. They were added to trigger 
security exceptions, but they won't be needed anymore now that #29296 
has been merged.
This commit is contained in:
Tanguy Leroux 2018-04-05 13:34:02 +02:00 committed by GitHub
parent dccd43af47
commit d813a05b9f
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
5 changed files with 278 additions and 219 deletions

View File

@ -151,8 +151,7 @@ class S3Repository extends BlobStoreRepository {
/** /**
* Constructs an s3 backed repository * Constructs an s3 backed repository
*/ */
S3Repository(RepositoryMetaData metadata, Settings settings, S3Repository(RepositoryMetaData metadata, Settings settings, NamedXContentRegistry namedXContentRegistry, AwsS3Service s3Service) {
NamedXContentRegistry namedXContentRegistry, AwsS3Service s3Service) throws IOException {
super(metadata, settings, namedXContentRegistry); super(metadata, settings, namedXContentRegistry);
String bucket = BUCKET_SETTING.get(metadata.settings()); String bucket = BUCKET_SETTING.get(metadata.settings());

View File

@ -20,14 +20,14 @@
package org.elasticsearch.repositories.s3; package org.elasticsearch.repositories.s3;
import com.amazonaws.AmazonClientException; import com.amazonaws.AmazonClientException;
import com.amazonaws.AmazonServiceException;
import com.amazonaws.SdkClientException; import com.amazonaws.SdkClientException;
import com.amazonaws.services.s3.AbstractAmazonS3; import com.amazonaws.services.s3.AbstractAmazonS3;
import com.amazonaws.services.s3.model.AmazonS3Exception; import com.amazonaws.services.s3.model.AmazonS3Exception;
import com.amazonaws.services.s3.model.CopyObjectRequest; import com.amazonaws.services.s3.model.CopyObjectRequest;
import com.amazonaws.services.s3.model.CopyObjectResult; import com.amazonaws.services.s3.model.CopyObjectResult;
import com.amazonaws.services.s3.model.DeleteObjectRequest; import com.amazonaws.services.s3.model.DeleteObjectRequest;
import com.amazonaws.services.s3.model.GetObjectMetadataRequest; import com.amazonaws.services.s3.model.DeleteObjectsRequest;
import com.amazonaws.services.s3.model.DeleteObjectsResult;
import com.amazonaws.services.s3.model.GetObjectRequest; import com.amazonaws.services.s3.model.GetObjectRequest;
import com.amazonaws.services.s3.model.ListObjectsRequest; import com.amazonaws.services.s3.model.ListObjectsRequest;
import com.amazonaws.services.s3.model.ObjectListing; import com.amazonaws.services.s3.model.ObjectListing;
@ -37,197 +37,163 @@ import com.amazonaws.services.s3.model.PutObjectResult;
import com.amazonaws.services.s3.model.S3Object; import com.amazonaws.services.s3.model.S3Object;
import com.amazonaws.services.s3.model.S3ObjectInputStream; import com.amazonaws.services.s3.model.S3ObjectInputStream;
import com.amazonaws.services.s3.model.S3ObjectSummary; import com.amazonaws.services.s3.model.S3ObjectSummary;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.Streams;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException; import java.io.IOException;
import java.io.InputStream;
import java.io.UncheckedIOException;
import java.net.InetAddress;
import java.net.Socket;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.concurrent.ConcurrentHashMap; import java.util.Objects;
import java.util.concurrent.ConcurrentMap;
import static org.junit.Assert.assertTrue; import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
class MockAmazonS3 extends AbstractAmazonS3 { class MockAmazonS3 extends AbstractAmazonS3 {
private final int mockSocketPort; private final ConcurrentMap<String, byte[]> blobs;
private final String bucket;
private final boolean serverSideEncryption;
private final String cannedACL;
private final String storageClass;
private Map<String, InputStream> blobs = new ConcurrentHashMap<>(); MockAmazonS3(final ConcurrentMap<String, byte[]> blobs,
final String bucket,
// in ESBlobStoreContainerTestCase.java, the maximum final boolean serverSideEncryption,
// length of the input data is 100 bytes final String cannedACL,
private byte[] byteCounter = new byte[100]; final String storageClass) {
this.blobs = Objects.requireNonNull(blobs);
this.bucket = Objects.requireNonNull(bucket);
MockAmazonS3(int mockSocketPort) { this.serverSideEncryption = serverSideEncryption;
this.mockSocketPort = mockSocketPort; this.cannedACL = cannedACL;
} this.storageClass = storageClass;
// Simulate a socket connection to check that SocketAccess.doPrivileged() is used correctly.
// Any method of AmazonS3 might potentially open a socket to the S3 service. Firstly, a call
// to any method of AmazonS3 has to be wrapped by SocketAccess.doPrivileged().
// Secondly, each method on the stack from doPrivileged to opening the socket has to be
// located in a jar that is provided by the plugin.
// Thirdly, a SocketPermission has to be configured in plugin-security.policy.
// By opening a socket in each method of MockAmazonS3 it is ensured that in production AmazonS3
// is able to to open a socket to the S3 Service without causing a SecurityException
private void simulateS3SocketConnection() {
try (Socket socket = new Socket(InetAddress.getByName("127.0.0.1"), mockSocketPort)) {
assertTrue(socket.isConnected()); // NOOP to keep static analysis happy
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
@Override
public boolean doesBucketExist(String bucket) {
return true;
} }
@Override @Override
public boolean doesObjectExist(String bucketName, String objectName) throws AmazonServiceException, SdkClientException { public boolean doesBucketExist(final String bucket) {
simulateS3SocketConnection(); return this.bucket.equalsIgnoreCase(bucket);
}
@Override
public boolean doesObjectExist(final String bucketName, final String objectName) throws SdkClientException {
assertThat(bucketName, equalTo(bucket));
return blobs.containsKey(objectName); return blobs.containsKey(objectName);
} }
@Override @Override
public ObjectMetadata getObjectMetadata( public PutObjectResult putObject(final PutObjectRequest request) throws AmazonClientException {
GetObjectMetadataRequest getObjectMetadataRequest) assertThat(request.getBucketName(), equalTo(bucket));
throws AmazonClientException, AmazonServiceException { assertThat(request.getMetadata().getSSEAlgorithm(), serverSideEncryption ? equalTo("AES256") : nullValue());
simulateS3SocketConnection(); assertThat(request.getCannedAcl(), notNullValue());
String blobName = getObjectMetadataRequest.getKey(); assertThat(request.getCannedAcl().toString(), cannedACL != null ? equalTo(cannedACL) : equalTo("private"));
assertThat(request.getStorageClass(), storageClass != null ? equalTo(storageClass) : equalTo("STANDARD"));
if (!blobs.containsKey(blobName)) {
throw new AmazonS3Exception("[" + blobName + "] does not exist."); final String blobName = request.getKey();
final ByteArrayOutputStream out = new ByteArrayOutputStream();
try {
Streams.copy(request.getInputStream(), out);
blobs.put(blobName, out.toByteArray());
} catch (IOException e) {
throw new AmazonClientException(e);
} }
return new ObjectMetadata(); // nothing is done with it
}
@Override
public PutObjectResult putObject(PutObjectRequest putObjectRequest)
throws AmazonClientException, AmazonServiceException {
simulateS3SocketConnection();
String blobName = putObjectRequest.getKey();
if (blobs.containsKey(blobName)) {
throw new AmazonS3Exception("[" + blobName + "] already exists.");
}
blobs.put(blobName, putObjectRequest.getInputStream());
return new PutObjectResult(); return new PutObjectResult();
} }
@Override @Override
public S3Object getObject(GetObjectRequest getObjectRequest) public S3Object getObject(final GetObjectRequest request) throws AmazonClientException {
throws AmazonClientException, AmazonServiceException { assertThat(request.getBucketName(), equalTo(bucket));
simulateS3SocketConnection();
// in ESBlobStoreContainerTestCase.java, the prefix is empty,
// so the key and blobName are equivalent to each other
String blobName = getObjectRequest.getKey();
if (!blobs.containsKey(blobName)) { final String blobName = request.getKey();
throw new AmazonS3Exception("[" + blobName + "] does not exist."); final byte[] content = blobs.get(blobName);
if (content == null) {
AmazonS3Exception exception = new AmazonS3Exception("[" + blobName + "] does not exist.");
exception.setStatusCode(404);
throw exception;
} }
// the HTTP request attribute is irrelevant for reading ObjectMetadata metadata = new ObjectMetadata();
S3ObjectInputStream stream = new S3ObjectInputStream( metadata.setContentLength(content.length);
blobs.get(blobName), null, false);
S3Object s3Object = new S3Object(); S3Object s3Object = new S3Object();
s3Object.setObjectContent(stream); s3Object.setObjectContent(new S3ObjectInputStream(new ByteArrayInputStream(content), null, false));
s3Object.setKey(blobName);
s3Object.setObjectMetadata(metadata);
return s3Object; return s3Object;
} }
@Override @Override
public ObjectListing listObjects(ListObjectsRequest listObjectsRequest) public ObjectListing listObjects(final ListObjectsRequest request) throws AmazonClientException {
throws AmazonClientException, AmazonServiceException { assertThat(request.getBucketName(), equalTo(bucket));
simulateS3SocketConnection();
MockObjectListing list = new MockObjectListing();
list.setTruncated(false);
String blobName; final ObjectListing listing = new ObjectListing();
String prefix = listObjectsRequest.getPrefix(); listing.setBucketName(request.getBucketName());
listing.setPrefix(request.getPrefix());
ArrayList<S3ObjectSummary> mockObjectSummaries = new ArrayList<>(); for (Map.Entry<String, byte[]> blob : blobs.entrySet()) {
if (Strings.isEmpty(request.getPrefix()) || blob.getKey().startsWith(request.getPrefix())) {
for (Map.Entry<String, InputStream> blob : blobs.entrySet()) { S3ObjectSummary summary = new S3ObjectSummary();
blobName = blob.getKey(); summary.setBucketName(request.getBucketName());
S3ObjectSummary objectSummary = new S3ObjectSummary(); summary.setKey(blob.getKey());
summary.setSize(blob.getValue().length);
if (prefix.isEmpty() || blobName.startsWith(prefix)) { listing.getObjectSummaries().add(summary);
objectSummary.setKey(blobName);
try {
objectSummary.setSize(getSize(blob.getValue()));
} catch (IOException e) {
throw new AmazonS3Exception("Object listing " +
"failed for blob [" + blob.getKey() + "]");
}
mockObjectSummaries.add(objectSummary);
} }
} }
return listing;
list.setObjectSummaries(mockObjectSummaries);
return list;
} }
@Override @Override
public CopyObjectResult copyObject(CopyObjectRequest copyObjectRequest) public CopyObjectResult copyObject(final CopyObjectRequest request) throws AmazonClientException {
throws AmazonClientException, AmazonServiceException { assertThat(request.getSourceBucketName(), equalTo(bucket));
simulateS3SocketConnection(); assertThat(request.getDestinationBucketName(), equalTo(bucket));
String sourceBlobName = copyObjectRequest.getSourceKey();
String targetBlobName = copyObjectRequest.getDestinationKey();
if (!blobs.containsKey(sourceBlobName)) { final String sourceBlobName = request.getSourceKey();
throw new AmazonS3Exception("Source blob [" +
sourceBlobName + "] does not exist."); final byte[] content = blobs.get(sourceBlobName);
if (content == null) {
AmazonS3Exception exception = new AmazonS3Exception("[" + sourceBlobName + "] does not exist.");
exception.setStatusCode(404);
throw exception;
} }
if (blobs.containsKey(targetBlobName)) { blobs.put(request.getDestinationKey(), content);
throw new AmazonS3Exception("Target blob [" + return new CopyObjectResult();
targetBlobName + "] already exists.");
}
blobs.put(targetBlobName, blobs.get(sourceBlobName));
return new CopyObjectResult(); // nothing is done with it
} }
@Override @Override
public void deleteObject(DeleteObjectRequest deleteObjectRequest) public void deleteObject(final DeleteObjectRequest request) throws AmazonClientException {
throws AmazonClientException, AmazonServiceException { assertThat(request.getBucketName(), equalTo(bucket));
simulateS3SocketConnection();
String blobName = deleteObjectRequest.getKey();
if (!blobs.containsKey(blobName)) { final String blobName = request.getKey();
throw new AmazonS3Exception("[" + blobName + "] does not exist."); if (blobs.remove(blobName) == null) {
AmazonS3Exception exception = new AmazonS3Exception("[" + blobName + "] does not exist.");
exception.setStatusCode(404);
throw exception;
} }
blobs.remove(blobName);
} }
private int getSize(InputStream stream) throws IOException { @Override
int size = stream.read(byteCounter); public DeleteObjectsResult deleteObjects(DeleteObjectsRequest request) throws SdkClientException {
stream.reset(); // in case we ever need the size again assertThat(request.getBucketName(), equalTo(bucket));
return size;
}
private class MockObjectListing extends ObjectListing { final List<DeleteObjectsResult.DeletedObject> deletions = new ArrayList<>();
// the objectSummaries attribute in ObjectListing.java for (DeleteObjectsRequest.KeyVersion key : request.getKeys()) {
// is read-only, but we need to be able to write to it, if (blobs.remove(key.getKey()) == null) {
// so we create a mock of it to work around this AmazonS3Exception exception = new AmazonS3Exception("[" + key + "] does not exist.");
private List<S3ObjectSummary> mockObjectSummaries; exception.setStatusCode(404);
throw exception;
@Override } else {
public List<S3ObjectSummary> getObjectSummaries() { DeleteObjectsResult.DeletedObject deletion = new DeleteObjectsResult.DeletedObject();
return mockObjectSummaries; deletion.setKey(key.getKey());
} deletions.add(deletion);
}
private void setObjectSummaries(List<S3ObjectSummary> objectSummaries) {
mockObjectSummaries = objectSummaries;
} }
return new DeleteObjectsResult(deletions);
} }
} }

View File

@ -37,26 +37,19 @@ import com.amazonaws.services.s3.model.UploadPartResult;
import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobPath;
import org.elasticsearch.common.blobstore.BlobStore; import org.elasticsearch.common.blobstore.BlobStore;
import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.mocksocket.MockServerSocket;
import org.elasticsearch.repositories.ESBlobStoreContainerTestCase; import org.elasticsearch.repositories.ESBlobStoreContainerTestCase;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.mockito.ArgumentCaptor; import org.mockito.ArgumentCaptor;
import java.io.ByteArrayInputStream; import java.io.ByteArrayInputStream;
import java.io.IOException; import java.io.IOException;
import java.net.InetAddress;
import java.net.ServerSocket;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
import java.util.List; import java.util.List;
import java.util.Locale;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import java.util.stream.IntStream; import java.util.stream.IntStream;
import static org.elasticsearch.repositories.s3.S3BlobStoreTests.randomMockS3BlobStore;
import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.instanceOf;
import static org.mockito.Matchers.any; import static org.mockito.Matchers.any;
import static org.mockito.Mockito.doNothing; import static org.mockito.Mockito.doNothing;
@ -67,36 +60,11 @@ import static org.mockito.Mockito.when;
public class S3BlobStoreContainerTests extends ESBlobStoreContainerTestCase { public class S3BlobStoreContainerTests extends ESBlobStoreContainerTestCase {
private static ServerSocket mockS3ServerSocket; protected BlobStore newBlobStore() {
return randomMockS3BlobStore();
private static Thread mockS3AcceptorThread;
// Opens a MockSocket to simulate connections to S3 checking that SocketPermissions are set up correctly.
// See MockAmazonS3.simulateS3SocketConnection.
@BeforeClass
public static void openMockSocket() throws IOException {
mockS3ServerSocket = new MockServerSocket(0, 50, InetAddress.getByName("127.0.0.1"));
mockS3AcceptorThread = new Thread(() -> {
while (!mockS3ServerSocket.isClosed()) {
try {
// Accept connections from MockAmazonS3.
mockS3ServerSocket.accept();
} catch (IOException e) {
}
}
});
mockS3AcceptorThread.start();
} }
protected BlobStore newBlobStore() throws IOException { public void testExecuteSingleUploadBlobSizeTooLarge() {
MockAmazonS3 client = new MockAmazonS3(mockS3ServerSocket.getLocalPort());
String bucket = randomAlphaOfLength(randomIntBetween(1, 10)).toLowerCase(Locale.ROOT);
return new S3BlobStore(Settings.EMPTY, client, bucket, false,
new ByteSizeValue(10, ByteSizeUnit.MB), "public-read-write", "standard");
}
public void testExecuteSingleUploadBlobSizeTooLarge() throws IOException {
final long blobSize = ByteSizeUnit.GB.toBytes(randomIntBetween(6, 10)); final long blobSize = ByteSizeUnit.GB.toBytes(randomIntBetween(6, 10));
final S3BlobStore blobStore = mock(S3BlobStore.class); final S3BlobStore blobStore = mock(S3BlobStore.class);
final S3BlobContainer blobContainer = new S3BlobContainer(mock(BlobPath.class), blobStore); final S3BlobContainer blobContainer = new S3BlobContainer(mock(BlobPath.class), blobStore);
@ -106,7 +74,7 @@ public class S3BlobStoreContainerTests extends ESBlobStoreContainerTestCase {
assertEquals("Upload request size [" + blobSize + "] can't be larger than 5gb", e.getMessage()); assertEquals("Upload request size [" + blobSize + "] can't be larger than 5gb", e.getMessage());
} }
public void testExecuteSingleUploadBlobSizeLargerThanBufferSize() throws IOException { public void testExecuteSingleUploadBlobSizeLargerThanBufferSize() {
final S3BlobStore blobStore = mock(S3BlobStore.class); final S3BlobStore blobStore = mock(S3BlobStore.class);
when(blobStore.bufferSizeInBytes()).thenReturn(ByteSizeUnit.MB.toBytes(1)); when(blobStore.bufferSizeInBytes()).thenReturn(ByteSizeUnit.MB.toBytes(1));
@ -168,7 +136,7 @@ public class S3BlobStoreContainerTests extends ESBlobStoreContainerTestCase {
} }
} }
public void testExecuteMultipartUploadBlobSizeTooLarge() throws IOException { public void testExecuteMultipartUploadBlobSizeTooLarge() {
final long blobSize = ByteSizeUnit.TB.toBytes(randomIntBetween(6, 10)); final long blobSize = ByteSizeUnit.TB.toBytes(randomIntBetween(6, 10));
final S3BlobStore blobStore = mock(S3BlobStore.class); final S3BlobStore blobStore = mock(S3BlobStore.class);
final S3BlobContainer blobContainer = new S3BlobContainer(mock(BlobPath.class), blobStore); final S3BlobContainer blobContainer = new S3BlobContainer(mock(BlobPath.class), blobStore);
@ -179,7 +147,7 @@ public class S3BlobStoreContainerTests extends ESBlobStoreContainerTestCase {
assertEquals("Multipart upload request size [" + blobSize + "] can't be larger than 5tb", e.getMessage()); assertEquals("Multipart upload request size [" + blobSize + "] can't be larger than 5tb", e.getMessage());
} }
public void testExecuteMultipartUploadBlobSizeTooSmall() throws IOException { public void testExecuteMultipartUploadBlobSizeTooSmall() {
final long blobSize = ByteSizeUnit.MB.toBytes(randomIntBetween(1, 4)); final long blobSize = ByteSizeUnit.MB.toBytes(randomIntBetween(1, 4));
final S3BlobStore blobStore = mock(S3BlobStore.class); final S3BlobStore blobStore = mock(S3BlobStore.class);
final S3BlobContainer blobContainer = new S3BlobContainer(mock(BlobPath.class), blobStore); final S3BlobContainer blobContainer = new S3BlobContainer(mock(BlobPath.class), blobStore);
@ -291,7 +259,7 @@ public class S3BlobStoreContainerTests extends ESBlobStoreContainerTestCase {
assertEquals(expectedEtags, actualETags); assertEquals(expectedEtags, actualETags);
} }
public void testExecuteMultipartUploadAborted() throws IOException { public void testExecuteMultipartUploadAborted() {
final String bucketName = randomAlphaOfLengthBetween(1, 10); final String bucketName = randomAlphaOfLengthBetween(1, 10);
final String blobName = randomAlphaOfLengthBetween(1, 10); final String blobName = randomAlphaOfLengthBetween(1, 10);
final BlobPath blobPath = new BlobPath(); final BlobPath blobPath = new BlobPath();
@ -418,12 +386,4 @@ public class S3BlobStoreContainerTests extends ESBlobStoreContainerTestCase {
assertEquals("Expected number of parts [" + expectedParts + "] but got [" + result.v1() + "]", expectedParts, (long) result.v1()); assertEquals("Expected number of parts [" + expectedParts + "] but got [" + result.v1() + "]", expectedParts, (long) result.v1());
assertEquals("Expected remaining [" + expectedRemaining + "] but got [" + result.v2() + "]", expectedRemaining, (long) result.v2()); assertEquals("Expected remaining [" + expectedRemaining + "] but got [" + result.v2() + "]", expectedRemaining, (long) result.v2());
} }
@AfterClass
public static void closeMockSocket() throws IOException, InterruptedException {
mockS3ServerSocket.close();
mockS3AcceptorThread.join();
mockS3AcceptorThread = null;
mockS3ServerSocket = null;
}
} }

View File

@ -0,0 +1,109 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.repositories.s3;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.model.CannedAccessControlList;
import com.amazonaws.services.s3.model.StorageClass;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.env.Environment;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.repositories.Repository;
import org.elasticsearch.repositories.blobstore.ESBlobStoreRepositoryIntegTestCase;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import java.util.Collection;
import java.util.Collections;
import java.util.Locale;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import static java.util.Collections.emptyMap;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
public class S3BlobStoreRepositoryTests extends ESBlobStoreRepositoryIntegTestCase {
private static final ConcurrentMap<String, byte[]> blobs = new ConcurrentHashMap<>();
private static String bucket;
private static String client;
private static ByteSizeValue bufferSize;
private static boolean serverSideEncryption;
private static String cannedACL;
private static String storageClass;
@BeforeClass
public static void setUpRepositorySettings() {
bucket = randomAlphaOfLength(randomIntBetween(1, 10)).toLowerCase(Locale.ROOT);
client = randomAlphaOfLength(randomIntBetween(1, 10)).toLowerCase(Locale.ROOT);
bufferSize = new ByteSizeValue(randomIntBetween(5, 50), ByteSizeUnit.MB);
serverSideEncryption = randomBoolean();
if (randomBoolean()) {
cannedACL = randomFrom(CannedAccessControlList.values()).toString();
}
if (randomBoolean()) {
storageClass = randomValueOtherThan(StorageClass.Glacier, () -> randomFrom(StorageClass.values())).toString();
}
}
@AfterClass
public static void wipeRepository() {
blobs.clear();
}
@Override
protected void createTestRepository(final String name) {
assertAcked(client().admin().cluster().preparePutRepository(name)
.setType(S3Repository.TYPE)
.setSettings(Settings.builder()
.put(S3Repository.BUCKET_SETTING.getKey(), bucket)
.put(InternalAwsS3Service.CLIENT_NAME.getKey(), client)
.put(S3Repository.BUFFER_SIZE_SETTING.getKey(), bufferSize)
.put(S3Repository.SERVER_SIDE_ENCRYPTION_SETTING.getKey(), serverSideEncryption)
.put(S3Repository.CANNED_ACL_SETTING.getKey(), cannedACL)
.put(S3Repository.STORAGE_CLASS_SETTING.getKey(), storageClass)));
}
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return Collections.singletonList(TestS3RepositoryPlugin.class);
}
public static class TestS3RepositoryPlugin extends S3RepositoryPlugin {
public TestS3RepositoryPlugin(final Settings settings) {
super(settings);
}
@Override
public Map<String, Repository.Factory> getRepositories(final Environment env, final NamedXContentRegistry registry) {
return Collections.singletonMap(S3Repository.TYPE, (metadata) ->
new S3Repository(metadata, env.settings(), registry, new InternalAwsS3Service(env.settings(), emptyMap()) {
@Override
public synchronized AmazonS3 client(final Settings repositorySettings) {
return new MockAmazonS3(blobs, bucket, serverSideEncryption, cannedACL, storageClass);
}
}));
}
}
}

View File

@ -19,18 +19,29 @@
package org.elasticsearch.repositories.s3; package org.elasticsearch.repositories.s3;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.model.CannedAccessControlList; import com.amazonaws.services.s3.model.CannedAccessControlList;
import com.amazonaws.services.s3.model.StorageClass; import com.amazonaws.services.s3.model.StorageClass;
import org.elasticsearch.common.blobstore.BlobStore;
import org.elasticsearch.common.blobstore.BlobStoreException; import org.elasticsearch.common.blobstore.BlobStoreException;
import org.elasticsearch.repositories.s3.S3BlobStore; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.test.ESTestCase; import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.repositories.ESBlobStoreTestCase;
import java.io.IOException; import java.util.Locale;
import java.util.concurrent.ConcurrentHashMap;
import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.equalTo;
public class S3BlobStoreTests extends ESTestCase { public class S3BlobStoreTests extends ESBlobStoreTestCase {
public void testInitCannedACL() throws IOException {
@Override
protected BlobStore newBlobStore() {
return randomMockS3BlobStore();
}
public void testInitCannedACL() {
String[] aclList = new String[]{ String[] aclList = new String[]{
"private", "public-read", "public-read-write", "authenticated-read", "private", "public-read", "public-read-write", "authenticated-read",
"log-delivery-write", "bucket-owner-read", "bucket-owner-full-control"}; "log-delivery-write", "bucket-owner-read", "bucket-owner-full-control"};
@ -52,16 +63,12 @@ public class S3BlobStoreTests extends ESTestCase {
} }
} }
public void testInvalidCannedACL() throws IOException { public void testInvalidCannedACL() {
try { BlobStoreException ex = expectThrows(BlobStoreException.class, () -> S3BlobStore.initCannedACL("test_invalid"));
S3BlobStore.initCannedACL("test_invalid"); assertThat(ex.getMessage(), equalTo("cannedACL is not valid: [test_invalid]"));
fail("CannedACL should fail");
} catch (BlobStoreException ex) {
assertThat(ex.getMessage(), equalTo("cannedACL is not valid: [test_invalid]"));
}
} }
public void testInitStorageClass() throws IOException { public void testInitStorageClass() {
// it should default to `standard` // it should default to `standard`
assertThat(S3BlobStore.initStorageClass(null), equalTo(StorageClass.Standard)); assertThat(S3BlobStore.initStorageClass(null), equalTo(StorageClass.Standard));
assertThat(S3BlobStore.initStorageClass(""), equalTo(StorageClass.Standard)); assertThat(S3BlobStore.initStorageClass(""), equalTo(StorageClass.Standard));
@ -72,25 +79,43 @@ public class S3BlobStoreTests extends ESTestCase {
assertThat(S3BlobStore.initStorageClass("reduced_redundancy"), equalTo(StorageClass.ReducedRedundancy)); assertThat(S3BlobStore.initStorageClass("reduced_redundancy"), equalTo(StorageClass.ReducedRedundancy));
} }
public void testCaseInsensitiveStorageClass() throws IOException { public void testCaseInsensitiveStorageClass() {
assertThat(S3BlobStore.initStorageClass("sTandaRd"), equalTo(StorageClass.Standard)); assertThat(S3BlobStore.initStorageClass("sTandaRd"), equalTo(StorageClass.Standard));
assertThat(S3BlobStore.initStorageClass("sTandaRd_Ia"), equalTo(StorageClass.StandardInfrequentAccess)); assertThat(S3BlobStore.initStorageClass("sTandaRd_Ia"), equalTo(StorageClass.StandardInfrequentAccess));
assertThat(S3BlobStore.initStorageClass("reduCED_redundancy"), equalTo(StorageClass.ReducedRedundancy)); assertThat(S3BlobStore.initStorageClass("reduCED_redundancy"), equalTo(StorageClass.ReducedRedundancy));
} }
public void testInvalidStorageClass() throws IOException { public void testInvalidStorageClass() {
try { BlobStoreException ex = expectThrows(BlobStoreException.class, () -> S3BlobStore.initStorageClass("whatever"));
S3BlobStore.initStorageClass("whatever"); assertThat(ex.getMessage(), equalTo("`whatever` is not a valid S3 Storage Class."));
} catch(BlobStoreException ex) {
assertThat(ex.getMessage(), equalTo("`whatever` is not a valid S3 Storage Class."));
}
} }
public void testRejectGlacierStorageClass() throws IOException { public void testRejectGlacierStorageClass() {
try { BlobStoreException ex = expectThrows(BlobStoreException.class, () -> S3BlobStore.initStorageClass("glacier"));
S3BlobStore.initStorageClass("glacier"); assertThat(ex.getMessage(), equalTo("Glacier storage class is not supported"));
} catch(BlobStoreException ex) { }
assertThat(ex.getMessage(), equalTo("Glacier storage class is not supported"));
/**
* Creates a new {@link S3BlobStore} with random settings.
* <p>
* The blobstore uses a {@link MockAmazonS3} client.
*/
public static S3BlobStore randomMockS3BlobStore() {
String bucket = randomAlphaOfLength(randomIntBetween(1, 10)).toLowerCase(Locale.ROOT);
ByteSizeValue bufferSize = new ByteSizeValue(randomIntBetween(5, 100), ByteSizeUnit.MB);
boolean serverSideEncryption = randomBoolean();
String cannedACL = null;
if (randomBoolean()) {
cannedACL = randomFrom(CannedAccessControlList.values()).toString();
} }
String storageClass = null;
if (randomBoolean()) {
storageClass = randomValueOtherThan(StorageClass.Glacier, () -> randomFrom(StorageClass.values())).toString();
}
AmazonS3 client = new MockAmazonS3(new ConcurrentHashMap<>(), bucket, serverSideEncryption, cannedACL, storageClass);
return new S3BlobStore(Settings.EMPTY, client, bucket, serverSideEncryption, bufferSize, cannedACL, storageClass);
} }
} }