mirror of https://github.com/apache/jclouds.git

commit fb67368738 (parent ec11f2ef20)

Issue 65

git-svn-id: http://jclouds.googlecode.com/svn/trunk@1441 3d8758e0-26b5-11de-8745-db77d3ebf521

@@ -23,11 +23,17 @@
  */
 package org.jclouds.aws.s3.commands;
 
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+
+import org.jclouds.aws.AWSResponseException;
 import org.jclouds.aws.s3.commands.options.PutBucketOptions;
 import org.jclouds.aws.s3.util.S3Utils;
 import org.jclouds.http.HttpHeaders;
 import org.jclouds.http.commands.callables.ReturnTrueIf2xx;
 
+import com.google.common.annotations.VisibleForTesting;
 import com.google.inject.Inject;
 import com.google.inject.assistedinject.Assisted;
 import com.google.inject.name.Named;
@@ -35,31 +41,59 @@ import com.google.inject.name.Named;
 /**
  * Create and name your own bucket in which to store your objects.
  * <p/>
- * The PUT request operation with a bucket URI creates a new bucket. Depending
- * on your latency and legal requirements, you can specify a location constraint
- * that will affect where your data physically resides. You can currently
- * specify a Europe (EU) location constraint via {@link PutBucketOptions}.
+ * The PUT request operation with a bucket URI creates a new bucket. Depending on your latency and
+ * legal requirements, you can specify a location constraint that will affect where your data
+ * physically resides. You can currently specify a Europe (EU) location constraint via
+ * {@link PutBucketOptions}.
  *
  * @see PutBucketOptions
- * @see <a href="http://docs.amazonwebservices.com/AmazonS3/2006-03-01/index.html?RESTBucketPUT.html"
+ * @see <a
+ *      href="http://docs.amazonwebservices.com/AmazonS3/2006-03-01/index.html?RESTBucketPUT.html"
  *      />
 * @author Adrian Cole
 *
 */
 public class PutBucket extends S3FutureCommand<Boolean> {
 
-    @Inject
-    public PutBucket(@Named("jclouds.http.address") String amazonHost,
-            ReturnTrueIf2xx callable, @Assisted String bucketName,
-            @Assisted PutBucketOptions options) {
-        super("PUT", "/", callable, amazonHost, S3Utils
-                .validateBucketName(bucketName));
-        getRequest().getHeaders().putAll(options.buildRequestHeaders());
-        String payload = options.buildPayload();
-        if (payload != null) {
-            getRequest().setPayload(payload);
-            getRequest().getHeaders().put(HttpHeaders.CONTENT_LENGTH,
-                    payload.getBytes().length + "");
-        }
-    }
+   @Inject
+   public PutBucket(@Named("jclouds.http.address") String amazonHost, ReturnTrueIf2xx callable,
+            @Assisted String bucketName, @Assisted PutBucketOptions options) {
+      super("PUT", "/", callable, amazonHost, S3Utils.validateBucketName(bucketName));
+      getRequest().getHeaders().putAll(options.buildRequestHeaders());
+      String payload = options.buildPayload();
+      if (payload != null) {
+         getRequest().setPayload(payload);
+         getRequest().getHeaders().put(HttpHeaders.CONTENT_LENGTH, payload.getBytes().length + "");
+      }
+   }
+
+   @Override
+   public Boolean get() throws InterruptedException, ExecutionException {
+      try {
+         return super.get();
+      } catch (ExecutionException e) {
+         return eventualConsistencyAlreadyOwnedIsOk(e);
+      }
+   }
+
+   @VisibleForTesting
+   static Boolean eventualConsistencyAlreadyOwnedIsOk(ExecutionException e) throws ExecutionException {
+      if (e.getCause() != null && e.getCause() instanceof AWSResponseException) {
+         AWSResponseException responseException = (AWSResponseException) e.getCause();
+         if ("BucketAlreadyOwnedByYou".equals(responseException.getError().getCode())) {
+            return true;
+         }
+      }
+      throw e;
+   }
+
+   @Override
+   public Boolean get(long l, TimeUnit timeUnit) throws InterruptedException, ExecutionException,
+            TimeoutException {
+      try {
+         return super.get(l, timeUnit);
+      } catch (ExecutionException e) {
+         return eventualConsistencyAlreadyOwnedIsOk(e);
+      }
+   }
 }
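
Note on the change above: the two get() overrides make a repeated PUT of a bucket you already own report success instead of surfacing BucketAlreadyOwnedByYou to the caller. The following is a minimal, self-contained sketch of that unwrap-and-tolerate idiom using only JDK types; ErrorCodeException is a hypothetical stand-in for AWSResponseException and its AWSError code, so this illustrates the pattern rather than the jclouds API itself.

import java.util.concurrent.ExecutionException;

// Sketch of the idiom PutBucket.get() now applies: unwrap an ExecutionException
// and treat exactly one benign error code as success. ErrorCodeException is
// hypothetical; in jclouds the cause is an AWSResponseException carrying an AWSError.
public class AlreadyOwnedIsOkSketch {

   static class ErrorCodeException extends RuntimeException {
      final String code;

      ErrorCodeException(String code) {
         super(code);
         this.code = code;
      }
   }

   // Swallow only the one error that eventual consistency can produce for a
   // bucket we already own; rethrow everything else unchanged.
   static boolean alreadyOwnedIsOk(ExecutionException e) throws ExecutionException {
      if (e.getCause() instanceof ErrorCodeException
               && "BucketAlreadyOwnedByYou".equals(((ErrorCodeException) e.getCause()).code))
         return true;
      throw e;
   }

   public static void main(String[] args) throws ExecutionException {
      // benign: the bucket already belongs to us, so the put counts as success
      System.out.println(alreadyOwnedIsOk(new ExecutionException(
               new ErrorCodeException("BucketAlreadyOwnedByYou")))); // true

      // anything else still propagates
      try {
         alreadyOwnedIsOk(new ExecutionException(new ErrorCodeException("AccessDenied")));
      } catch (ExecutionException expected) {
         System.out.println("rethrown: " + expected.getCause().getMessage()); // AccessDenied
      }
   }
}

Swallowing only this single code keeps every other S3 error, including access failures, propagating exactly as before.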
@@ -38,50 +38,48 @@ import java.util.concurrent.TimeUnit;
 /**
  * Tests integrated functionality of all PutBucket commands.
  * <p/>
- * Each test uses a different bucket name, so it should be perfectly fine to run
- * in parallel.
+ * Each test uses a different bucket name, so it should be perfectly fine to run in parallel.
  *
  * @author Adrian Cole
  */
 @Test(testName = "s3.PutBucketLiveTest")
 public class PutBucketLiveTest extends S3IntegrationTest {
 
-    /**
-     * overriding bucketName as we are changing access permissions
-     */
-    @Test(groups = {"live"})
-    void testPublicReadAccessPolicy() throws Exception {
-        String bucketName = bucketPrefix + "public";
+   /**
+    * overriding bucketName as we are changing access permissions
+    */
+   @Test(groups = { "live" })
+   void testPublicReadAccessPolicy() throws Exception {
+      String bucketName = bucketPrefix + "public";
 
-        client.putBucketIfNotExists(bucketName,
-                withBucketAcl(CannedAccessPolicy.PUBLIC_READ)).get(10,
-                TimeUnit.SECONDS);
-        URL url = new URL(String.format("http://%1$s.s3.amazonaws.com",
-                bucketName));
-        S3Utils.toStringAndClose(url.openStream());
-    }
+      client.putBucketIfNotExists(bucketName, withBucketAcl(CannedAccessPolicy.PUBLIC_READ)).get(
+               10, TimeUnit.SECONDS);
+      URL url = new URL(String.format("http://%1$s.s3.amazonaws.com", bucketName));
+      S3Utils.toStringAndClose(url.openStream());
+   }
 
-    @Test(expectedExceptions = IOException.class, groups = {"live"})
-    void testDefaultAccessPolicy() throws Exception {
-        URL url = new URL(String.format("http://%1$s.s3.amazonaws.com",
-                bucketName));
-        S3Utils.toStringAndClose(url.openStream());
-    }
+   @Test(groups = { "live" })
+   void testPutTwiceIsOk() throws Exception {
+      client.putBucketIfNotExists(bucketName).get(10, TimeUnit.SECONDS);
+   }
 
-    /**
-     * overriding bucketName as we are changing location
-     */
-    @Test(groups = "live")
-    void testEu() throws Exception {
-        String bucketName = (bucketPrefix + "wow").toLowerCase();
-        client.putBucketIfNotExists(
-                bucketName,
-                createIn(LocationConstraint.EU).withBucketAcl(
-                        CannedAccessPolicy.PUBLIC_READ)).get(10,
-                TimeUnit.SECONDS);
+   @Test(expectedExceptions = IOException.class, groups = { "live" })
+   void testDefaultAccessPolicy() throws Exception {
+      URL url = new URL(String.format("http://%1$s.s3.amazonaws.com", bucketName));
+      S3Utils.toStringAndClose(url.openStream());
+   }
 
-        URL url = new URL(String.format("http://%1$s.s3.amazonaws.com",
-                bucketName));
-        S3Utils.toStringAndClose(url.openStream());
-    }
+   /**
+    * overriding bucketName as we are changing location
+    */
+   @Test(groups = "live")
+   void testEu() throws Exception {
+      String bucketName = (bucketPrefix + "wow").toLowerCase();
+      client.putBucketIfNotExists(bucketName,
+               createIn(LocationConstraint.EU).withBucketAcl(CannedAccessPolicy.PUBLIC_READ)).get(
+               10, TimeUnit.SECONDS);
+
+      URL url = new URL(String.format("http://%1$s.s3.amazonaws.com", bucketName));
+      S3Utils.toStringAndClose(url.openStream());
+   }
 }
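
The live tests above always consume the command through a bounded Future.get rather than an unbounded wait. A small hypothetical helper capturing that convention follows; the ten-second limit is taken straight from the tests, while the class and method names are illustrative only and not part of the jclouds API.

import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

// Hypothetical helper mirroring how the live tests consume the command:
// every bucket creation is bounded by a ten-second wait, as in
// client.putBucketIfNotExists(name, ...).get(10, TimeUnit.SECONDS).
public final class BoundedPut {

   private BoundedPut() {
   }

   // Returns the command's Boolean result, or fails fast instead of blocking forever.
   public static boolean awaitCreation(Future<Boolean> putBucket)
            throws InterruptedException, ExecutionException, TimeoutException {
      return putBucket.get(10, TimeUnit.SECONDS);
   }
}

Because PutBucket also overrides get(long, TimeUnit), the BucketAlreadyOwnedByYou tolerance applies on this bounded path as well.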
@@ -0,0 +1,71 @@
+/**
+ *
+ * Copyright (C) 2009 Global Cloud Specialists, Inc. <info@globalcloudspecialists.com>
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+package org.jclouds.aws.s3.commands;
+
+import static org.easymock.EasyMock.expect;
+import static org.easymock.classextension.EasyMock.createMock;
+import static org.easymock.classextension.EasyMock.replay;
+
+import java.util.concurrent.ExecutionException;
+
+import org.jclouds.aws.AWSResponseException;
+import org.jclouds.aws.domain.AWSError;
+import org.testng.annotations.Test;
+
+/**
+ * @author Adrian Cole
+ */
+@Test(testName = "s3.PutBucketTest")
+public class PutBucketTest {
+
+   @Test
+   void testBucketAlreadyOwnedByYouIsOk() throws Exception {
+      ExecutionException e = getErrorWithCode("BucketAlreadyOwnedByYou");
+      assert PutBucket.eventualConsistencyAlreadyOwnedIsOk(e);
+   }
+
+   @Test
+   void testBlahIsNotOk() throws Exception {
+      ExecutionException e = getErrorWithCode("blah");
+      try {
+         PutBucket.eventualConsistencyAlreadyOwnedIsOk(e);
+         assert false;
+      } catch (ExecutionException er) {
+         // don't try expectedExceptions as it will fail due to easymock reasons
+      }
+   }
+
+   private ExecutionException getErrorWithCode(String code) {
+      AWSResponseException inner = createMock(AWSResponseException.class);
+      ExecutionException e = createMock(ExecutionException.class);
+      expect(e.getCause()).andReturn(inner).atLeastOnce();
+      AWSError error = createMock(AWSError.class);
+      expect(inner.getError()).andReturn(error);
+      expect(error.getCode()).andReturn(code);
+      replay(e);
+      replay(inner);
+      replay(error);
+      return e;
+   }
+}