mirror of https://github.com/apache/jclouds.git
Issue 35: moved content; also renamed options to parameters, since that name makes more sense, and reformatted some code
git-svn-id: http://jclouds.googlecode.com/svn/trunk@843 3d8758e0-26b5-11de-8745-db77d3ebf521
parent e5e3c48185
commit 4eced672f1
@@ -0,0 +1,67 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
$HeadURL$
$Revision$
$Date$

Copyright (C) 2009 Adrian Cole <adrian@jclouds.org>

====================================================================
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0.html

Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
====================================================================
-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<parent>
<groupId>org.jclouds</groupId>
<artifactId>jclouds-aws-project</artifactId>
<version>1.0-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>
<groupId>org.jclouds</groupId>
<artifactId>jclouds-aws-core</artifactId>
<name>jclouds Amazon AWS Components Core</name>
<packaging>jar</packaging>
<description>jclouds Core components to access Amazon AWS</description>

<scm>
<connection>scm:svn:http://jclouds.googlecode.com/svn/trunk/aws/core</connection>
<developerConnection>scm:svn:https://jclouds.googlecode.com/svn/trunk/aws/core</developerConnection>
<url>http://jclouds.googlecode.com/svn/trunk/aws/core</url>
</scm>

<dependencies>
<dependency>
<groupId>joda-time</groupId>
<artifactId>joda-time</artifactId>
<version>1.6</version>
</dependency>
<dependency>
<groupId>bouncycastle</groupId>
<artifactId>bcprov-jdk15</artifactId>
<version>140</version>
</dependency>
<dependency>
<groupId>xstream</groupId>
<artifactId>xstream</artifactId>
<version>1.2</version>
<scope>test</scope>
</dependency>
</dependencies>
</project>
@@ -0,0 +1,110 @@
package org.jclouds.aws.util;

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.security.InvalidKeyException;
import java.security.NoSuchAlgorithmException;
import java.security.NoSuchProviderException;
import java.util.regex.Pattern;

import org.apache.commons.io.IOUtils;
import org.bouncycastle.crypto.digests.MD5Digest;
import org.bouncycastle.crypto.digests.SHA1Digest;
import org.bouncycastle.crypto.macs.HMac;
import org.bouncycastle.crypto.params.KeyParameter;
import org.bouncycastle.util.encoders.Base64;
import org.jclouds.util.Utils;

/**
* Encryption, Hashing, and IO Utilities needed to sign and verify AWS requests and responses.
*
* @author Adrian Cole
*/
public class AWSUtils extends Utils {

protected static final Pattern IP_PATTERN = Pattern
.compile("\\b(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.)"
+ "{3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\b");
static final byte[] HEX_CHAR_TABLE = { (byte) '0', (byte) '1', (byte) '2', (byte) '3',
(byte) '4', (byte) '5', (byte) '6', (byte) '7', (byte) '8', (byte) '9', (byte) 'a',
(byte) 'b', (byte) 'c', (byte) 'd', (byte) 'e', (byte) 'f' };

public static String toHexString(byte[] raw) throws UnsupportedEncodingException {
byte[] hex = new byte[2 * raw.length];
int index = 0;

for (byte b : raw) {
int v = b & 0xFF;
hex[index++] = HEX_CHAR_TABLE[v >>> 4];
hex[index++] = HEX_CHAR_TABLE[v & 0xF];
}
return new String(hex, "ASCII");
}

public static byte[] fromHexString(String hex) {
byte[] bytes = new byte[hex.length() / 2];
for (int i = 0; i < bytes.length; i++) {
bytes[i] = (byte) Integer.parseInt(hex.substring(2 * i, 2 * i + 2), 16);
}
return bytes;
}

public static String hmacSha1Base64(String toEncode, byte[] key)
throws NoSuchAlgorithmException, NoSuchProviderException, InvalidKeyException {
HMac hmac = new HMac(new SHA1Digest());
byte[] resBuf = new byte[hmac.getMacSize()];
byte[] plainBytes = toEncode.getBytes();
byte[] keyBytes = key;
hmac.init(new KeyParameter(keyBytes));
hmac.update(plainBytes, 0, plainBytes.length);
hmac.doFinal(resBuf, 0);
return toBase64String(resBuf);
}

public static String md5Hex(byte[] toEncode) throws NoSuchAlgorithmException,
NoSuchProviderException, InvalidKeyException, UnsupportedEncodingException {
byte[] resBuf = md5(toEncode);
return toHexString(resBuf);
}

public static String md5Base64(byte[] toEncode) throws NoSuchAlgorithmException,
NoSuchProviderException, InvalidKeyException {
byte[] resBuf = md5(toEncode);
return toBase64String(resBuf);
}

public static byte[] md5(byte[] plainBytes) {
MD5Digest md5 = new MD5Digest();
byte[] resBuf = new byte[md5.getDigestSize()];
md5.update(plainBytes, 0, plainBytes.length);
md5.doFinal(resBuf, 0);
return resBuf;
}

public static byte[] md5(File toEncode) throws IOException {
MD5Digest md5 = new MD5Digest();
byte[] resBuf = new byte[md5.getDigestSize()];
byte[] buffer = new byte[1024];
int numRead = -1;
InputStream i = new FileInputStream(toEncode);
try {
do {
numRead = i.read(buffer);
if (numRead > 0) {
md5.update(buffer, 0, numRead);
}
} while (numRead != -1);
} finally {
IOUtils.closeQuietly(i);
}
md5.doFinal(resBuf, 0);
return resBuf;
}

public static String toBase64String(byte[] resBuf) {
return new String(Base64.encode(resBuf));
}
}
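For orientation, a minimal usage sketch of the helpers above (not part of the commit): it assumes jclouds-aws-core and the Bouncy Castle provider are on the classpath, and the key and string-to-sign values are made up.

import org.jclouds.aws.util.AWSUtils;

public class AWSUtilsExample {
   public static void main(String[] args) throws Exception {
      byte[] secretKey = "EXAMPLE-SECRET-KEY".getBytes();   // hypothetical credential
      String toSign = "GET\n\n\nThu, 12 Mar 2009 02:00:07 GMT\n/my-bucket/my-key";

      // HMAC-SHA1 over the canonical string, Base64-encoded, as used when signing AWS requests
      String signature = AWSUtils.hmacSha1Base64(toSign, secretKey);

      // MD5 of a payload in hex and Base64 form (Content-MD5 / ETag style checks)
      String md5Hex = AWSUtils.md5Hex("hello".getBytes());
      String md5Base64 = AWSUtils.md5Base64("hello".getBytes());

      System.out.printf("signature=%s md5Hex=%s md5Base64=%s%n", signature, md5Hex, md5Base64);
   }
}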
@@ -0,0 +1,141 @@
/**
*
* Copyright (C) 2009 Adrian Cole <adrian@jclouds.org>
*
* ====================================================================
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
* ====================================================================
*/
package org.jclouds.aws.util;

import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;
import java.util.SimpleTimeZone;

import net.jcip.annotations.GuardedBy;
import net.jcip.annotations.ThreadSafe;

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;

import com.google.common.annotations.VisibleForTesting;

/**
* Parses and formats the ISO8601 and RFC822 date formats found in XML responses and HTTP response
* headers.
* <p>
* Either {@link SimpleDateFormat} or {@link DateTimeFormatter} classes are used internally,
* depending on which version gives the best performance.
*
* @author Adrian Cole
* @author James Murty
*/
@ThreadSafe
public class DateService {
/*
* Use default Java Date/SimpleDateFormat classes for date manipulation, but be *very* careful to
* guard against the lack of thread safety.
*/

@GuardedBy("this")
private static final SimpleDateFormat iso8601SimpleDateFormat = new SimpleDateFormat(
"yyyy-MM-dd'T'HH:mm:ss.SSS'Z'", Locale.US);

@GuardedBy("this")
private static final SimpleDateFormat rfc822SimpleDateFormat = new SimpleDateFormat(
"EEE, dd MMM yyyy HH:mm:ss z", Locale.US);

private static final DateTimeFormatter iso8601DateTimeFormatter = DateTimeFormat.forPattern(
"yyyy-MM-dd'T'HH:mm:ss.SSS'Z'").withLocale(Locale.US).withZone(
DateTimeZone.forID("GMT"));

private static final DateTimeFormatter rfc822DateTimeFormatter = DateTimeFormat.forPattern(
"EEE, dd MMM yyyy HH:mm:ss 'GMT'").withLocale(Locale.US).withZone(
DateTimeZone.forID("GMT"));

static {
iso8601SimpleDateFormat.setTimeZone(new SimpleTimeZone(0, "GMT"));
rfc822SimpleDateFormat.setTimeZone(new SimpleTimeZone(0, "GMT"));
}

public final String rfc822DateFormat(DateTime dateTime) {
return rfc822DateTimeFormatter.print(dateTime);
}

public final String rfc822DateFormat(Date date) {
return rfc822DateFormat(new DateTime(date));
}

public final String rfc822DateFormat() {
return rfc822DateFormat(new DateTime());
}

public final DateTime rfc822DateParse(String toParse) {
synchronized (rfc822SimpleDateFormat) {
try {
return new DateTime(rfc822SimpleDateFormat.parse(toParse));
} catch (ParseException e) {
return null;
}
}
}

public final String iso8601DateFormat(DateTime dateTime) {
return iso8601DateTimeFormatter.print(dateTime);
}

public final String iso8601DateFormat(Date date) {
return iso8601DateFormat(new DateTime(date));
}

public final String iso8601DateFormat() {
return iso8601DateFormat(new DateTime());
}

public final DateTime iso8601DateParse(String toParse) {
synchronized (iso8601SimpleDateFormat) {
try {
return new DateTime(iso8601SimpleDateFormat.parse(toParse));
} catch (ParseException e) {
return null;
}
}
}

/*
* Alternative implementations of Format and Parse -- used to test relative speeds. TODO: Remove
* methods below once sufficient performance testing is complete.
*/

@VisibleForTesting
public final DateTime jodaIso8601DateParse(String toParse) {
return new DateTime(toParse);
}

@VisibleForTesting
public final String sdfIso8601DateFormat(DateTime dateTime) {
synchronized (iso8601SimpleDateFormat) {
return iso8601SimpleDateFormat.format(dateTime.toDate());
}
}

}
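A minimal usage sketch for the DateService above (not part of the commit); it relies only on the methods shown in this file.

import org.jclouds.aws.util.DateService;
import org.joda.time.DateTime;

public class DateServiceExample {
   public static void main(String[] args) {
      DateService dateService = new DateService();

      // Render the current instant in both wire formats
      String iso8601 = dateService.iso8601DateFormat();  // e.g. 2009-03-12T02:00:07.000Z
      String rfc822 = dateService.rfc822DateFormat();    // e.g. Thu, 12 Mar 2009 02:00:07 GMT

      // Round-trip an HTTP-style date header back into a DateTime (returns null on parse failure)
      DateTime parsed = dateService.rfc822DateParse(rfc822);
      System.out.println(iso8601 + " / " + parsed);
   }
}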
@@ -32,6 +32,7 @@ import java.util.concurrent.ExecutorCompletionService;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

import org.jclouds.aws.util.DateServiceTest;
import org.testng.annotations.AfterTest;
import org.testng.annotations.BeforeTest;
import org.testng.annotations.Test;

@@ -57,20 +58,17 @@ public class PerformanceTest {
exec.shutdownNow();
exec = null;
}

/**
* Executes a list of Runnable tasks in {@link #THREAD_COUNT}
* simultaneous threads, and outputs the timing results.
* Executes a list of Runnable tasks in {@link #THREAD_COUNT} simultaneous threads, and outputs
* the timing results.
* <p>
* This method is careful to time only the actual task execution
* time, not the overhead of creating and queuing the tasks.
* We also use CountDownLatches to ensure that all tasks start
* at the same time, so concurrency is fully tested without
* ramp-up or ramp-down times.
* This method is careful to time only the actual task execution time, not the overhead of
* creating and queuing the tasks. We also use CountDownLatches to ensure that all tasks start at
* the same time, so concurrency is fully tested without ramp-up or ramp-down times.
* <p>
* This code is heavily based on Listing 5.11 in
* "Java Concurrency in Practice" by Brian Goetz et al,
* Addison-Wesley Professional.
* This code is heavily based on Listing 5.11 in "Java Concurrency in Practice" by Brian Goetz et
* al, Addison-Wesley Professional.
*
* @see {@link DateServiceTest} for example usage.
*
@@ -80,55 +78,53 @@ public class PerformanceTest {
* @throws ExecutionException
* @throws Throwable
*/
protected void executeMultiThreadedPerformanceTest(String performanceTestName, List<Runnable> tasks)
throws InterruptedException, ExecutionException, Throwable
{
protected void executeMultiThreadedPerformanceTest(String performanceTestName,
List<Runnable> tasks) throws InterruptedException, ExecutionException, Throwable {
CompletionService<Throwable> completer = new ExecutorCompletionService<Throwable>(exec);
final CountDownLatch startGate = new CountDownLatch(1);
final CountDownLatch endGate = new CountDownLatch(THREAD_COUNT);

for (int i = 0; i < THREAD_COUNT; i++) {
final Runnable task = tasks.get(i % tasks.size());
// Wrap task so we can count down endGate.
final Runnable task = tasks.get(i % tasks.size());
// Wrap task so we can count down endGate.
completer.submit(new Callable<Throwable>() {
public Throwable call() {
try {
startGate.await(); // Wait to start simultaneously
task.run();
return null;
} catch (Throwable t) {
return t;
} finally {
endGate.countDown(); // Notify that I've finished
}
}
});
}

public Throwable call() {
try {
startGate.await(); // Wait to start simultaneously
task.run();
return null;
} catch (Throwable t) {
return t;
} finally {
endGate.countDown(); // Notify that I've finished
}
}
});
}

// Only time the execution time for all tasks, not start/stop times.
long startTime = System.nanoTime();
startGate.countDown(); // Trigger start of all tasks
startGate.countDown(); // Trigger start of all tasks
endGate.await();
long endTime = System.nanoTime() - startTime;

// Check for assertion failures
Throwable t;
Throwable t;
for (int i = 0; i < THREAD_COUNT; i++) {
t = completer.take().get();
if (t != null) {
throw t;
}
}
t = completer.take().get();
if (t != null) {
throw t;
}
}
if (performanceTestName != null) {
System.out.printf("TIMING: Multi-threaded %s took %.3fms for %d threads\n",
performanceTestName, ((double)endTime / 1000000), THREAD_COUNT);
System.out.printf("TIMING: Multi-threaded %s took %.3fms for %d threads\n",
performanceTestName, ((double) endTime / 1000000), THREAD_COUNT);
}
}

protected void executeMultiThreadedCorrectnessTest(List<Runnable> tasks)
throws InterruptedException, ExecutionException, Throwable
{
executeMultiThreadedPerformanceTest(null, tasks);
protected void executeMultiThreadedCorrectnessTest(List<Runnable> tasks)
throws InterruptedException, ExecutionException, Throwable {
executeMultiThreadedPerformanceTest(null, tasks);
}

}
}
@@ -0,0 +1,216 @@
/**
*
* Copyright (C) 2009 Adrian Cole <adrian@jclouds.org>
*
* ====================================================================
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
* ====================================================================
*/
package org.jclouds.aws.util;

import static org.testng.Assert.assertEquals;

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ExecutionException;

import org.jclouds.aws.PerformanceTest;
import org.joda.time.DateTime;
import org.testng.annotations.Test;

import com.google.inject.Guice;
import com.google.inject.Injector;

/*
* TODO: Scrap any non-DateService references (eg Joda & Amazon) if/when
* we confirm that the DateService is fast enough.
*/

/**
* Compares performance of date operations
*
* @author Adrian Cole
* @author James Murty
*/
@Test(sequential = true, timeOut = 2 * 60 * 1000, testName = "s3.DateTest")
public class DateServiceTest extends PerformanceTest {
Injector i = Guice.createInjector();

DateService dateService = i.getInstance(DateService.class);

protected TestData[] testData;

protected class TestData {
public final String iso8601DateString;
public final String rfc822DateString;
public final DateTime date;

TestData(String iso8601, String rfc822, DateTime dateTime) {
this.iso8601DateString = iso8601;
this.rfc822DateString = rfc822;
this.date = dateTime;
}
}

public DateServiceTest() {
// Constant time test values, each TestData item must contain matching times!
testData = new TestData[] {
new TestData("2009-03-12T02:00:07.000Z", "Thu, 12 Mar 2009 02:00:07 GMT",
new DateTime(1236823207000l)),
new TestData("2009-03-14T04:00:07.000Z", "Sat, 14 Mar 2009 04:00:07 GMT",
new DateTime(1237003207000l)),
new TestData("2009-03-16T06:00:07.000Z", "Mon, 16 Mar 2009 06:00:07 GMT",
new DateTime(1237183207000l)),
new TestData("2009-03-18T08:00:07.000Z", "Wed, 18 Mar 2009 08:00:07 GMT",
new DateTime(1237363207000l)),
new TestData("2009-03-20T10:00:07.000Z", "Fri, 20 Mar 2009 10:00:07 GMT",
new DateTime(1237543207000l)) };
}

@Test
public void testIso8601DateParse() throws ExecutionException, InterruptedException {
DateTime dsDate = dateService.iso8601DateParse(testData[0].iso8601DateString);
assertEquals(dsDate, testData[0].date);
}

@Test
public void testRfc822DateParse() throws ExecutionException, InterruptedException {
DateTime dsDate = dateService.rfc822DateParse(testData[0].rfc822DateString);
assertEquals(dsDate, testData[0].date);
}

@Test
public void testIso8601DateFormat() throws ExecutionException, InterruptedException {
String dsString = dateService.iso8601DateFormat(testData[0].date);
assertEquals(dsString, testData[0].iso8601DateString);
}

@Test
public void testRfc822DateFormat() throws ExecutionException, InterruptedException {
String dsString = dateService.rfc822DateFormat(testData[0].date);
assertEquals(dsString, testData[0].rfc822DateString);
}

@Test
void testIso8601DateFormatResponseTime() throws ExecutionException, InterruptedException {
for (int i = 0; i < LOOP_COUNT; i++)
dateService.iso8601DateFormat();
}

@Test
void testRfc822DateFormatResponseTime() throws ExecutionException, InterruptedException {
for (int i = 0; i < LOOP_COUNT; i++)
dateService.rfc822DateFormat();
}

@Test
void testFormatIso8601DateCorrectnessInParallel() throws Throwable {
List<Runnable> tasks = new ArrayList<Runnable>(testData.length);
for (final TestData myData : testData) {
tasks.add(new Runnable() {
public void run() {
String dsString = dateService.iso8601DateFormat(myData.date);
assertEquals(dsString, myData.iso8601DateString);
}
});
}
executeMultiThreadedCorrectnessTest(tasks);
}

@Test
void testFormatIso8601DatePerformanceInParallel() throws Throwable {
List<Runnable> tasks = new ArrayList<Runnable>(testData.length);
for (final TestData myData : testData) {
tasks.add(new Runnable() {
public void run() {
dateService.iso8601DateFormat(myData.date);
}
});
}
executeMultiThreadedPerformanceTest("testFormatIso8601DatePerformanceInParallel", tasks);
}

@Test
void testFormatIso8601DatePerformanceInParallel_SdfAlternative() throws Throwable {
List<Runnable> tasks = new ArrayList<Runnable>(testData.length);
for (final TestData myData : testData) {
tasks.add(new Runnable() {
public void run() {
dateService.sdfIso8601DateFormat(myData.date);
}
});
}
executeMultiThreadedPerformanceTest(
"testFormatIso8601DatePerformanceInParallel_SdfAlternative", tasks);
}

@Test
void testParseIso8601DateSerialResponseTime() throws ExecutionException, InterruptedException {
for (int i = 0; i < LOOP_COUNT; i++)
dateService.iso8601DateParse(testData[0].iso8601DateString);
}

@Test
void testParseIso8601DateSerialResponseTime_JodaAlternative() throws ExecutionException,
InterruptedException {
for (int i = 0; i < LOOP_COUNT; i++)
dateService.jodaIso8601DateParse(testData[0].iso8601DateString);
}

@Test
void testParseIso8601DateCorrectnessInParallel() throws Throwable {
List<Runnable> tasks = new ArrayList<Runnable>(testData.length);
for (final TestData myData : testData) {
tasks.add(new Runnable() {
public void run() {
DateTime dsDate = dateService.iso8601DateParse(myData.iso8601DateString);
assertEquals(dsDate, myData.date);
}
});
}
executeMultiThreadedCorrectnessTest(tasks);
}

@Test
void testParseIso8601DatePerformanceInParallel() throws Throwable {
List<Runnable> tasks = new ArrayList<Runnable>(testData.length);
for (final TestData myData : testData) {
tasks.add(new Runnable() {
public void run() {
dateService.iso8601DateParse(myData.iso8601DateString);
}
});
}
executeMultiThreadedPerformanceTest("testParseIso8601DatePerformanceInParallel", tasks);
}

@Test
void testParseIso8601DatePerformanceInParallel_JodaAlternative() throws Throwable {
List<Runnable> tasks = new ArrayList<Runnable>(testData.length);
for (final TestData myData : testData) {
tasks.add(new Runnable() {
public void run() {
dateService.jodaIso8601DateParse(myData.iso8601DateString);
}
});
}
executeMultiThreadedPerformanceTest(
"testParseIso8601DatePerformanceInParallel_JodaAlternative", tasks);
}

}
@@ -38,6 +38,7 @@
<packaging>pom</packaging>
<name>jclouds aws project</name>
<modules>
<module>core</module>
<module>s3</module>
</modules>
<properties>

@@ -57,7 +58,6 @@
<type>test-jar</type>
<scope>test</scope>
</dependency>

</dependencies>
<profiles>
<profile>
@@ -1,75 +1,56 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
$HeadURL$
$Revision$
$Date$
<!--
$HeadURL$ $Revision$ $Date$ Copyright (C) 2009 Adrian Cole
<adrian@jclouds.org>

Copyright (C) 2009 Adrian Cole <adrian@jclouds.org>
====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to you under the Apache License, Version
2.0 (the "License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at

====================================================================
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0.html

Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
====================================================================
-->
http://www.apache.org/licenses/LICENSE-2.0.html Unless required by
applicable law or agreed to in writing, software distributed under the
License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
CONDITIONS OF ANY KIND, either express or implied. See the License for
the specific language governing permissions and limitations under the
License.
====================================================================
-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<parent>
<groupId>org.jclouds</groupId>
<artifactId>jclouds-s3-project</artifactId>
<version>1.0-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>
<groupId>org.jclouds</groupId>
<artifactId>jclouds-s3</artifactId>
<name>jclouds Amazon S3 Components Core</name>
<packaging>jar</packaging>
<description>jclouds Core components to access Amazon S3</description>
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<parent>
<groupId>org.jclouds</groupId>
<artifactId>jclouds-s3-project</artifactId>
<version>1.0-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>
<groupId>org.jclouds</groupId>
<artifactId>jclouds-s3</artifactId>
<name>jclouds Amazon S3 Components Core</name>
<packaging>jar</packaging>
<description>jclouds Core components to access Amazon S3</description>

<scm>
<connection>scm:svn:http://jclouds.googlecode.com/svn/trunk/s3</connection>
<developerConnection>scm:svn:https://jclouds.googlecode.com/svn/trunk/s3</developerConnection>
<url>http://jclouds.googlecode.com/svn/trunk/s3</url>
</scm>
<scm>
<connection>scm:svn:http://jclouds.googlecode.com/svn/trunk/s3</connection>
<developerConnection>scm:svn:https://jclouds.googlecode.com/svn/trunk/s3</developerConnection>
<url>http://jclouds.googlecode.com/svn/trunk/s3</url>
</scm>

<properties>
<jclouds.aws.accesskeyid></jclouds.aws.accesskeyid>
<jclouds.aws.secretaccesskey></jclouds.aws.secretaccesskey>
<jclouds.s3.httpstream.url>http://apache.rediris.es/maven/binaries/apache-maven-2.1.0-bin.tar.bz2
<properties>
<jclouds.s3.httpstream.url>http://apache.rediris.es/maven/binaries/apache-maven-2.1.0-bin.tar.bz2
</jclouds.s3.httpstream.url>
<jclouds.s3.httpstream.md5>9268c9de2cccfd0d8fbcdbcfaf517a87</jclouds.s3.httpstream.md5>
</properties>

<dependencies>
<dependency>
<groupId>joda-time</groupId>
<artifactId>joda-time</artifactId>
<version>1.6</version>
</dependency>
<dependency>
<groupId>bouncycastle</groupId>
<artifactId>bcprov-jdk15</artifactId>
<version>140</version>
</dependency>
<dependency>
<groupId>xstream</groupId>
<artifactId>xstream</artifactId>
<version>1.2</version>
<scope>test</scope>
</dependency>
</dependencies>
<jclouds.s3.httpstream.md5>9268c9de2cccfd0d8fbcdbcfaf517a87</jclouds.s3.httpstream.md5>
</properties>
<dependencies>
<dependency>
<groupId>xstream</groupId>
<artifactId>xstream</artifactId>
<version>1.2</version>
<scope>test</scope>
</dependency>
</dependencies>
</project>
@@ -23,8 +23,30 @@
*/
package org.jclouds.aws.s3;

import com.google.common.annotations.VisibleForTesting;
import static com.google.common.base.Preconditions.checkNotNull;
import static org.jclouds.aws.s3.reference.S3Constants.PROPERTY_AWS_ACCESSKEYID;
import static org.jclouds.aws.s3.reference.S3Constants.PROPERTY_AWS_SECRETACCESSKEY;
import static org.jclouds.command.pool.PoolConstants.PROPERTY_POOL_IO_WORKER_THREADS;
import static org.jclouds.command.pool.PoolConstants.PROPERTY_POOL_MAX_CONNECTIONS;
import static org.jclouds.command.pool.PoolConstants.PROPERTY_POOL_MAX_CONNECTION_REUSE;
import static org.jclouds.command.pool.PoolConstants.PROPERTY_POOL_MAX_SESSION_FAILURES;
import static org.jclouds.command.pool.PoolConstants.PROPERTY_POOL_REQUEST_INVOKER_THREADS;
import static org.jclouds.http.HttpConstants.PROPERTY_HTTP_ADDRESS;
import static org.jclouds.http.HttpConstants.PROPERTY_HTTP_PORT;
import static org.jclouds.http.HttpConstants.PROPERTY_HTTP_SECURE;

import java.util.List;
import java.util.Properties;

import org.jclouds.aws.s3.config.LiveS3ConnectionModule;
import org.jclouds.aws.s3.config.S3ConnectionModule;
import org.jclouds.aws.s3.config.S3ContextModule;
import org.jclouds.http.config.HttpFutureCommandClientModule;
import org.jclouds.http.config.JavaUrlHttpFutureCommandClientModule;
import org.jclouds.logging.config.LoggingModule;
import org.jclouds.logging.jdk.config.JDKLoggingModule;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Predicate;
import com.google.common.base.Predicates;
import com.google.common.collect.Iterables;

@@ -34,203 +56,165 @@ import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.Module;
import com.google.inject.name.Names;
import org.jclouds.aws.s3.config.LiveS3ConnectionModule;
import org.jclouds.aws.s3.config.S3ConnectionModule;
import org.jclouds.aws.s3.config.S3ContextModule;
import static org.jclouds.aws.s3.reference.S3Constants.PROPERTY_AWS_ACCESSKEYID;
import static org.jclouds.aws.s3.reference.S3Constants.PROPERTY_AWS_SECRETACCESSKEY;
import org.jclouds.aws.s3.internal.LiveS3Connection;
import static org.jclouds.command.pool.PoolConstants.*;
import static org.jclouds.http.HttpConstants.*;
import org.jclouds.http.config.HttpFutureCommandClientModule;
import org.jclouds.http.config.JavaUrlHttpFutureCommandClientModule;
import org.jclouds.logging.config.LoggingModule;
import org.jclouds.logging.jdk.config.JDKLoggingModule;

import java.util.List;
import java.util.Properties;

/**
* Creates {@link S3Context} or {@link Injector} instances based on the most
* commonly requested arguments.
* Creates {@link S3Context} or {@link Injector} instances based on the most commonly requested
* arguments.
* <p/>
* Note that Threadsafe objects will be bound as singletons to the Injector or
* Context provided.
* Note that Threadsafe objects will be bound as singletons to the Injector or Context provided.
* <p/>
* <p/>
* If no <code>Module</code>s are specified, the default
* {@link JDKLoggingModule logging} and
* {@link JavaUrlHttpFutureCommandClientModule http transports} will be
* installed.
*
* If no <code>Module</code>s are specified, the default {@link JDKLoggingModule logging} and
* {@link JavaUrlHttpFutureCommandClientModule http transports} will be installed.
*
* @author Adrian Cole
* @see S3Context
*/
public class S3ContextFactory {

public static final Properties DEFAULT_PROPERTIES;
public static final Properties DEFAULT_PROPERTIES;

static {
DEFAULT_PROPERTIES = new Properties();
DEFAULT_PROPERTIES.setProperty(PROPERTY_HTTP_ADDRESS,
"s3.amazonaws.com");
DEFAULT_PROPERTIES.setProperty(PROPERTY_HTTP_PORT, "443");
DEFAULT_PROPERTIES.setProperty(PROPERTY_HTTP_SECURE, "true");
DEFAULT_PROPERTIES
.setProperty(PROPERTY_POOL_MAX_CONNECTION_REUSE, "75");
DEFAULT_PROPERTIES.setProperty(PROPERTY_POOL_MAX_SESSION_FAILURES, "2");
DEFAULT_PROPERTIES.setProperty(PROPERTY_POOL_REQUEST_INVOKER_THREADS,
"1");
DEFAULT_PROPERTIES.setProperty(PROPERTY_POOL_IO_WORKER_THREADS, "2");
DEFAULT_PROPERTIES.setProperty(PROPERTY_POOL_MAX_CONNECTIONS, "12");
}
static {
DEFAULT_PROPERTIES = new Properties();
DEFAULT_PROPERTIES.setProperty(PROPERTY_HTTP_ADDRESS, "s3.amazonaws.com");
DEFAULT_PROPERTIES.setProperty(PROPERTY_HTTP_PORT, "443");
DEFAULT_PROPERTIES.setProperty(PROPERTY_HTTP_SECURE, "true");
DEFAULT_PROPERTIES.setProperty(PROPERTY_POOL_MAX_CONNECTION_REUSE, "75");
DEFAULT_PROPERTIES.setProperty(PROPERTY_POOL_MAX_SESSION_FAILURES, "2");
DEFAULT_PROPERTIES.setProperty(PROPERTY_POOL_REQUEST_INVOKER_THREADS, "1");
DEFAULT_PROPERTIES.setProperty(PROPERTY_POOL_IO_WORKER_THREADS, "2");
DEFAULT_PROPERTIES.setProperty(PROPERTY_POOL_MAX_CONNECTIONS, "12");
}

public static Injector createInjector(String awsAccessKeyId,
String awsSecretAccessKey, Module... modules) {
Properties properties = new Properties(DEFAULT_PROPERTIES);
properties.setProperty(PROPERTY_AWS_ACCESSKEYID, awsAccessKeyId);
properties
.setProperty(PROPERTY_AWS_SECRETACCESSKEY, awsSecretAccessKey);
return createInjector(properties, modules);
}
public static Injector createInjector(String awsAccessKeyId, String awsSecretAccessKey,
Module... modules) {
Properties properties = new Properties(DEFAULT_PROPERTIES);
properties.setProperty(PROPERTY_AWS_ACCESSKEYID, awsAccessKeyId);
properties.setProperty(PROPERTY_AWS_SECRETACCESSKEY, awsSecretAccessKey);
return createInjector(properties, modules);
}

public static S3Context createS3Context(String awsAccessKeyId,
String awsSecretAccessKey, Module... modules) {
return createInjector(awsAccessKeyId, awsSecretAccessKey, modules)
.getInstance(S3Context.class);
}
public static S3Context createS3Context(String awsAccessKeyId, String awsSecretAccessKey,
Module... modules) {
return createInjector(awsAccessKeyId, awsSecretAccessKey, modules).getInstance(
S3Context.class);
}

public static Injector createInjector(String awsAccessKeyId,
String awsSecretAccessKey, boolean isSecure, Module... modules) {
Properties properties = new Properties(DEFAULT_PROPERTIES);
properties.setProperty(PROPERTY_AWS_ACCESSKEYID, awsAccessKeyId);
properties
.setProperty(PROPERTY_AWS_SECRETACCESSKEY, awsSecretAccessKey);
properties
.setProperty(PROPERTY_HTTP_SECURE, Boolean.toString(isSecure));
if (!isSecure)
properties.setProperty(PROPERTY_HTTP_PORT, "80");
return createInjector(properties, modules);
}
public static Injector createInjector(String awsAccessKeyId, String awsSecretAccessKey,
boolean isSecure, Module... modules) {
Properties properties = new Properties(DEFAULT_PROPERTIES);
properties.setProperty(PROPERTY_AWS_ACCESSKEYID, awsAccessKeyId);
properties.setProperty(PROPERTY_AWS_SECRETACCESSKEY, awsSecretAccessKey);
properties.setProperty(PROPERTY_HTTP_SECURE, Boolean.toString(isSecure));
if (!isSecure)
properties.setProperty(PROPERTY_HTTP_PORT, "80");
return createInjector(properties, modules);
}

public static S3Context createS3Context(String awsAccessKeyId,
String awsSecretAccessKey, boolean isSecure, Module... modules) {
return createInjector(awsAccessKeyId, awsSecretAccessKey, isSecure,
modules).getInstance(S3Context.class);
}
public static S3Context createS3Context(String awsAccessKeyId, String awsSecretAccessKey,
boolean isSecure, Module... modules) {
return createInjector(awsAccessKeyId, awsSecretAccessKey, isSecure, modules).getInstance(
S3Context.class);
}

public static Injector createInjector(String awsAccessKeyId,
String awsSecretAccessKey, boolean isSecure, String server,
Module... modules) {
Properties properties = new Properties(DEFAULT_PROPERTIES);
properties.setProperty(PROPERTY_AWS_ACCESSKEYID, awsAccessKeyId);
properties
.setProperty(PROPERTY_AWS_SECRETACCESSKEY, awsSecretAccessKey);
properties
.setProperty(PROPERTY_HTTP_SECURE, Boolean.toString(isSecure));
properties.setProperty(PROPERTY_HTTP_ADDRESS, server);
if (!isSecure)
properties.setProperty(PROPERTY_HTTP_PORT, "80");
return createInjector(properties, modules);
}
public static Injector createInjector(String awsAccessKeyId, String awsSecretAccessKey,
boolean isSecure, String server, Module... modules) {
Properties properties = new Properties(DEFAULT_PROPERTIES);
properties.setProperty(PROPERTY_AWS_ACCESSKEYID, awsAccessKeyId);
properties.setProperty(PROPERTY_AWS_SECRETACCESSKEY, awsSecretAccessKey);
properties.setProperty(PROPERTY_HTTP_SECURE, Boolean.toString(isSecure));
properties.setProperty(PROPERTY_HTTP_ADDRESS, server);
if (!isSecure)
properties.setProperty(PROPERTY_HTTP_PORT, "80");
return createInjector(properties, modules);
}

public static S3Context createS3Context(String awsAccessKeyId,
String awsSecretAccessKey, boolean isSecure, String server,
Module... modules) {
return createInjector(awsAccessKeyId, awsSecretAccessKey, isSecure,
server, modules).getInstance(S3Context.class);
}
public static S3Context createS3Context(String awsAccessKeyId, String awsSecretAccessKey,
boolean isSecure, String server, Module... modules) {
return createInjector(awsAccessKeyId, awsSecretAccessKey, isSecure, server, modules)
.getInstance(S3Context.class);
}

public static S3Context createS3Context(String awsAccessKeyId,
String awsSecretAccessKey, boolean isSecure, String server,
int port, Module... modules) {
return createInjector(awsAccessKeyId, awsSecretAccessKey, isSecure,
server, port, modules).getInstance(S3Context.class);
}
public static S3Context createS3Context(String awsAccessKeyId, String awsSecretAccessKey,
boolean isSecure, String server, int port, Module... modules) {
return createInjector(awsAccessKeyId, awsSecretAccessKey, isSecure, server, port, modules)
.getInstance(S3Context.class);
}

public static Injector createInjector(String awsAccessKeyId,
String awsSecretAccessKey, boolean isSecure, String server,
int port, Module... modules) {
Properties properties = new Properties(DEFAULT_PROPERTIES);
properties.setProperty(PROPERTY_AWS_ACCESSKEYID, awsAccessKeyId);
properties
.setProperty(PROPERTY_AWS_SECRETACCESSKEY, awsSecretAccessKey);
properties
.setProperty(PROPERTY_HTTP_SECURE, Boolean.toString(isSecure));
properties.setProperty(PROPERTY_HTTP_ADDRESS, server);
properties.setProperty(PROPERTY_HTTP_PORT, port + "");
return createInjector(properties, modules);
}
public static Injector createInjector(String awsAccessKeyId, String awsSecretAccessKey,
boolean isSecure, String server, int port, Module... modules) {
Properties properties = new Properties(DEFAULT_PROPERTIES);
properties.setProperty(PROPERTY_AWS_ACCESSKEYID, awsAccessKeyId);
properties.setProperty(PROPERTY_AWS_SECRETACCESSKEY, awsSecretAccessKey);
properties.setProperty(PROPERTY_HTTP_SECURE, Boolean.toString(isSecure));
properties.setProperty(PROPERTY_HTTP_ADDRESS, server);
properties.setProperty(PROPERTY_HTTP_PORT, port + "");
return createInjector(properties, modules);
}

public static S3Context createS3Context(Properties properties,
Module... modules) {
return createInjector(properties, modules).getInstance(S3Context.class);
}
public static S3Context createS3Context(Properties properties, Module... modules) {
return createInjector(properties, modules).getInstance(S3Context.class);
}

/**
* Bind the given properties and install the list of modules. If no modules
* are specified, install the default {@link JDKLoggingModule}
* {@link JavaUrlHttpFutureCommandClientModule}
*
* @param properties - contains constants used by jclouds
* {@link #DEFAULT_PROPERTIES}
* @param configModules - alternative configuration modules
*/
public static Injector createInjector(final Properties properties,
Module... configModules) {
final List<Module> modules = Lists.newArrayList(configModules);
/**
* Bind the given properties and install the list of modules. If no modules are specified,
* install the default {@link JDKLoggingModule} {@link JavaUrlHttpFutureCommandClientModule}
*
* @param properties
* - contains constants used by jclouds {@link #DEFAULT_PROPERTIES}
* @param configModules
* - alternative configuration modules
*/
public static Injector createInjector(final Properties properties, Module... configModules) {
final List<Module> modules = Lists.newArrayList(configModules);

addLoggingModuleIfNotPresent(modules);
addLoggingModuleIfNotPresent(modules);

addHttpModuleIfNeededAndNotPresent(modules);
addHttpModuleIfNeededAndNotPresent(modules);

addS3ConnectionModuleIfNotPresent(modules);
addS3ConnectionModuleIfNotPresent(modules);

return Guice.createInjector(new AbstractModule() {
@Override
protected void configure() {
Names.bindProperties(binder(), checkNotNull(properties,
"properties"));
for (Module module : modules)
install(module);
}
}, new S3ContextModule());
}
return Guice.createInjector(new AbstractModule() {
@Override
protected void configure() {
Names.bindProperties(binder(), checkNotNull(properties, "properties"));
for (Module module : modules)
install(module);
}
}, new S3ContextModule());
}

@VisibleForTesting
static void addHttpModuleIfNeededAndNotPresent(final List<Module> modules) {
if (Iterables.any(modules, new Predicate<Module>() {
public boolean apply(Module input) {
return input instanceof LiveS3ConnectionModule;
}
@VisibleForTesting
static void addHttpModuleIfNeededAndNotPresent(final List<Module> modules) {
if (Iterables.any(modules, new Predicate<Module>() {
public boolean apply(Module input) {
return input instanceof LiveS3ConnectionModule;
}

}) && (!Iterables.any(modules, new Predicate<Module>() {
public boolean apply(Module input) {
return input.getClass().isAnnotationPresent(
HttpFutureCommandClientModule.class);
}
}) && (!Iterables.any(modules, new Predicate<Module>() {
public boolean apply(Module input) {
return input.getClass().isAnnotationPresent(HttpFutureCommandClientModule.class);
}

})))
modules.add(new JavaUrlHttpFutureCommandClientModule());
}
})))
modules.add(new JavaUrlHttpFutureCommandClientModule());
}

@VisibleForTesting
static void addS3ConnectionModuleIfNotPresent(final List<Module> modules) {
if (!Iterables.any(modules, new Predicate<Module>() {
public boolean apply(Module input) {
return input.getClass().isAnnotationPresent(
S3ConnectionModule
.class);
}
@VisibleForTesting
static void addS3ConnectionModuleIfNotPresent(final List<Module> modules) {
if (!Iterables.any(modules, new Predicate<Module>() {
public boolean apply(Module input) {
return input.getClass().isAnnotationPresent(S3ConnectionModule.class);
}

})){
modules.add(new LiveS3ConnectionModule());
}
}
})) {
modules.add(new LiveS3ConnectionModule());
}
}

@VisibleForTesting
static void addLoggingModuleIfNotPresent(final List<Module> modules) {
if (!Iterables.any(modules, Predicates.instanceOf(LoggingModule.class)))
modules.add(new JDKLoggingModule());
}
@VisibleForTesting
static void addLoggingModuleIfNotPresent(final List<Module> modules) {
if (!Iterables.any(modules, Predicates.instanceOf(LoggingModule.class)))
modules.add(new JDKLoggingModule());
}
}
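A minimal bootstrap sketch for the factory above (not part of the commit). The credentials are placeholders, and anything beyond createS3Context itself, such as how the resulting S3Context is used or shut down, is assumed rather than shown in this diff.

import org.jclouds.aws.s3.S3Context;
import org.jclouds.aws.s3.S3ContextFactory;

public class S3ContextFactoryExample {
   public static void main(String[] args) {
      // Uses the defaults from DEFAULT_PROPERTIES: s3.amazonaws.com over HTTPS, with
      // JDK logging and the java.net.URL HTTP transport unless other modules are passed.
      S3Context context = S3ContextFactory.createS3Context("myAccessKeyId", "mySecretAccessKey");

      // ... issue S3 commands through the context here; lifecycle/cleanup methods
      // are not part of this diff, so they are omitted.
   }
}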
@@ -41,112 +41,103 @@ import com.google.inject.name.Named;
* @author Adrian Cole
*/
public class S3CommandFactory {
@Inject
private S3ParserFactory parserFactory;
@Inject
private S3ParserFactory parserFactory;

@Inject
private DeleteBucketFactory deleteBucketFactory;
@Inject
private DeleteBucketFactory deleteBucketFactory;

public static interface DeleteBucketFactory {
DeleteBucket create(String bucket);
}
public static interface DeleteBucketFactory {
DeleteBucket create(String bucket);
}

public DeleteBucket createDeleteBucket(String bucket) {
return deleteBucketFactory.create(bucket);
}
public DeleteBucket createDeleteBucket(String bucket) {
return deleteBucketFactory.create(bucket);
}

@Inject
private DeleteObjectFactory deleteObjectFactory;
@Inject
private DeleteObjectFactory deleteObjectFactory;

public static interface DeleteObjectFactory {
DeleteObject create(@Assisted("bucketName") String bucket,
@Assisted("key") String key);
}
public static interface DeleteObjectFactory {
DeleteObject create(@Assisted("bucketName") String bucket, @Assisted("key") String key);
}

public DeleteObject createDeleteObject(String bucket, String key) {
return deleteObjectFactory.create(bucket, key);
}
public DeleteObject createDeleteObject(String bucket, String key) {
return deleteObjectFactory.create(bucket, key);
}

@Inject
private BucketExistsFactory headBucketFactory;
@Inject
private BucketExistsFactory headBucketFactory;

public static interface BucketExistsFactory {
BucketExists create(String bucket);
}
public static interface BucketExistsFactory {
BucketExists create(String bucket);
}

public BucketExists createHeadBucket(String bucket) {
return headBucketFactory.create(bucket);
}
public BucketExists createHeadBucket(String bucket) {
return headBucketFactory.create(bucket);
}

@Inject
private PutBucketFactory putBucketFactoryOptions;
@Inject
private PutBucketFactory putBucketFactoryOptions;

public static interface PutBucketFactory {
PutBucket create(String bucket, PutBucketOptions options);
}
public static interface PutBucketFactory {
PutBucket create(String bucket, PutBucketOptions options);
}

public PutBucket createPutBucket(String bucket, PutBucketOptions options) {
return putBucketFactoryOptions.create(bucket, options);
}
public PutBucket createPutBucket(String bucket, PutBucketOptions options) {
return putBucketFactoryOptions.create(bucket, options);
}

@Inject
private PutObjectFactory putObjectFactory;
@Inject
private PutObjectFactory putObjectFactory;

public static interface PutObjectFactory {
PutObject create(String bucket, S3Object object,
PutObjectOptions options);
}
public static interface PutObjectFactory {
PutObject create(String bucket, S3Object object, PutObjectOptions options);
}

public PutObject createPutObject(String bucket, S3Object s3Object,
PutObjectOptions options) {
return putObjectFactory.create(bucket, s3Object, options);
}
public PutObject createPutObject(String bucket, S3Object s3Object, PutObjectOptions options) {
return putObjectFactory.create(bucket, s3Object, options);
}

@Inject
private GetObjectFactory getObjectFactory;
@Inject
private GetObjectFactory getObjectFactory;

public static interface GetObjectFactory {
GetObject create(@Assisted("bucketName") String bucket,
@Assisted("key") String key, GetObjectOptions options);
}
public static interface GetObjectFactory {
GetObject create(@Assisted("bucketName") String bucket, @Assisted("key") String key,
GetObjectOptions options);
}

public GetObject createGetObject(String bucket, String key,
GetObjectOptions options) {
return getObjectFactory.create(bucket, key, options);
}
public GetObject createGetObject(String bucket, String key, GetObjectOptions options) {
return getObjectFactory.create(bucket, key, options);
}

@Inject
private HeadMetadataFactory headMetadataFactory;
@Inject
private HeadMetadataFactory headMetadataFactory;

public static interface HeadMetadataFactory {
HeadObject create(@Assisted("bucketName") String bucket,
@Assisted("key") String key);
}
public static interface HeadMetadataFactory {
HeadObject create(@Assisted("bucketName") String bucket, @Assisted("key") String key);
}

public HeadObject createHeadMetadata(String bucket, String key) {
return headMetadataFactory.create(bucket, key);
}
public HeadObject createHeadMetadata(String bucket, String key) {
return headMetadataFactory.create(bucket, key);
}

@Inject
@Named("jclouds.http.address")
String amazonHost;
@Inject
@Named("jclouds.http.address")
String amazonHost;

public ListOwnedBuckets createGetMetadataForOwnedBuckets() {
return new ListOwnedBuckets(amazonHost, parserFactory
.createListBucketsParser());
}
public ListOwnedBuckets createGetMetadataForOwnedBuckets() {
return new ListOwnedBuckets(amazonHost, parserFactory.createListBucketsParser());
}

public ListBucket createListBucket(String bucket, ListBucketOptions options) {
return new ListBucket(amazonHost, parserFactory
.createListBucketParser(), bucket, options);
}
public ListBucket createListBucket(String bucket, ListBucketOptions options) {
return new ListBucket(amazonHost, parserFactory.createListBucketParser(), bucket, options);
}

public CopyObject createCopyObject(String sourceBucket,
String sourceObject, String destinationBucket,
String destinationObject, CopyObjectOptions options) {
return new CopyObject(amazonHost, parserFactory
.createCopyObjectParser(), sourceBucket, sourceObject,
destinationBucket, destinationObject, options);
}
public CopyObject createCopyObject(String sourceBucket, String sourceObject,
String destinationBucket, String destinationObject, CopyObjectOptions options) {
return new CopyObject(amazonHost, parserFactory.createCopyObjectParser(), sourceBucket,
sourceObject, destinationBucket, destinationObject, options);
}

}

@@ -36,22 +36,20 @@ import org.jclouds.http.HttpFutureCommand;
 */
public class S3FutureCommand<T> extends HttpFutureCommand<T> {

   public S3FutureCommand(String method, String uri, ResponseCallable<T> responseCallable,
            String amazonHost, String bucketName) {
      super(method, uri, responseCallable);
      addHostHeader(checkNotNull(amazonHost, "amazonHost"), checkNotNull(bucketName, "bucketName"));
   }

   public S3FutureCommand(String method, String uri, ResponseCallable<T> responseCallable,
            String amazonHost) {
      super(method, uri, responseCallable);
      addHostHeader(checkNotNull(amazonHost, "amazonHost"));
   }

   protected void addHostHeader(String amazonHost, String bucketName) {
      addHostHeader(checkNotNull(bucketName) + "." + amazonHost);
   }

}
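
The two-argument addHostHeader above builds the virtual-hosted style Host header by prepending the bucket name to the service host; a quick illustration with hypothetical values:

// Illustration only; the bucket and host values are hypothetical, not from this commit.
// bucketName = "mybucket", amazonHost = "s3.amazonaws.com"
// addHostHeader("mybucket" + "." + "s3.amazonaws.com")
//   -> request is sent with the header:  Host: mybucket.s3.amazonaws.com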

@@ -28,8 +28,8 @@ import java.util.Map.Entry;
import org.jclouds.aws.s3.domain.S3Object;
import org.jclouds.aws.s3.domain.S3Object.Metadata;
import org.jclouds.aws.s3.reference.S3Headers;
import org.jclouds.aws.s3.util.DateService;
import org.jclouds.aws.s3.util.S3Utils;
import org.jclouds.aws.util.DateService;
import org.jclouds.http.HttpException;
import org.jclouds.http.HttpFutureCommand;
import org.jclouds.http.HttpHeaders;

@@ -42,105 +42,88 @@ import com.google.inject.Inject;
 * @see <a href="http://docs.amazonwebservices.com/AmazonS3/latest/RESTObjectGET.html" />
 * @author Adrian Cole
 */
public class ParseMetadataFromHeaders extends HttpFutureCommand.ResponseCallable<S3Object.Metadata> {
   private final DateService dateParser;
   private String key;

   @Inject
   public ParseMetadataFromHeaders(DateService dateParser) {
      this.dateParser = dateParser;
   }

   /**
    * parses the http response headers to create a new
    * {@link org.jclouds.aws.s3.domain.S3Object.Metadata} object.
    */
   public S3Object.Metadata call() throws HttpException {
      checkCode();

      S3Object.Metadata metadata = new S3Object.Metadata(key);
      addAllHeadersTo(metadata);

      addUserMetadataTo(metadata);
      addMd5To(metadata);

      parseLastModifiedOrThrowException(metadata);
      setContentTypeOrThrowException(metadata);
      setContentLengthOrThrowException(metadata);

      metadata.setCacheControl(getResponse().getFirstHeaderOrNull(HttpHeaders.CACHE_CONTROL));
      metadata.setContentDisposition(getResponse().getFirstHeaderOrNull(
               HttpHeaders.CONTENT_DISPOSITION));
      metadata.setContentEncoding(getResponse().getFirstHeaderOrNull(HttpHeaders.CONTENT_ENCODING));
      return metadata;
   }

   private void addAllHeadersTo(Metadata metadata) {
      metadata.getAllHeaders().putAll(getResponse().getHeaders());
   }

   private void setContentTypeOrThrowException(S3Object.Metadata metadata) throws HttpException {
      String contentType = getResponse().getFirstHeaderOrNull(HttpHeaders.CONTENT_TYPE);
      if (contentType == null)
         throw new HttpException(HttpHeaders.CONTENT_TYPE + " not found in headers");
      else
         metadata.setContentType(contentType);
   }

   private void setContentLengthOrThrowException(S3Object.Metadata metadata) throws HttpException {
      String contentLength = getResponse().getFirstHeaderOrNull(HttpHeaders.CONTENT_LENGTH);
      if (contentLength == null)
         throw new HttpException(HttpHeaders.CONTENT_LENGTH + " not found in headers");
      else
         metadata.setSize(Long.parseLong(contentLength));
   }

   private void parseLastModifiedOrThrowException(S3Object.Metadata metadata) throws HttpException {
      String lastModified = getResponse().getFirstHeaderOrNull(HttpHeaders.LAST_MODIFIED);
      metadata.setLastModified(dateParser.rfc822DateParse(lastModified));
      if (metadata.getLastModified() == null)
         throw new HttpException("could not parse: " + HttpHeaders.LAST_MODIFIED + ": "
                  + lastModified);
   }

   private void addMd5To(S3Object.Metadata metadata) {
      String md5Header = getResponse().getFirstHeaderOrNull(S3Headers.AMZ_MD5);
      if (md5Header != null) {
         metadata.setMd5(S3Utils.fromHexString(md5Header));
      }
      String eTag = getResponse().getFirstHeaderOrNull(S3Headers.ETAG);
      if (metadata.getMd5() == null && eTag != null) {
         metadata.setMd5(S3Utils.fromHexString(eTag.replaceAll("\"", "")));
      }
   }

   private void addUserMetadataTo(S3Object.Metadata metadata) {
      for (Entry<String, String> header : getResponse().getHeaders().entries()) {
         if (header.getKey() != null && header.getKey().startsWith(S3Headers.USER_METADATA_PREFIX))
            metadata.getUserMetadata().put(header.getKey(), header.getValue());
      }
   }

   public void setKey(String key) {
      this.key = key;
   }

}
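
addUserMetadataTo copies every response header whose key carries the user-metadata prefix into the object's user metadata; a sketch, assuming the prefix is "x-amz-meta-" (the same prefix checked elsewhere in this commit):

// Illustration only, assuming S3Headers.USER_METADATA_PREFIX == "x-amz-meta-":
// response header "x-amz-meta-custom: some-value"
//   -> metadata.getUserMetadata() gains ["x-amz-meta-custom" -> "some-value"]
// response header "Content-Type: text/plain" is skipped (no user-metadata prefix)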

@@ -23,25 +23,29 @@
 */
package org.jclouds.aws.s3.commands.options;

import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;

import java.io.UnsupportedEncodingException;

import org.jclouds.aws.s3.domain.acl.CannedAccessPolicy;
import org.jclouds.aws.s3.reference.S3Headers;
import org.jclouds.aws.s3.util.S3Utils;
import org.jclouds.aws.util.DateService;
import org.jclouds.http.options.BaseHttpRequestOptions;
import org.joda.time.DateTime;

import com.google.common.base.Preconditions;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;

/**
 * Contains options supported in the REST API for the COPY object operation.
 * <p/>
 * <h2>Usage</h2> The recommended way to instantiate a CopyObjectOptions object is to statically
 * import CopyObjectOptions.Builder.* and invoke a static creation method followed by an instance
 * mutator (if needed):
 * <p/>
 * <code>
 * import static org.jclouds.aws.s3.commands.options.CopyObjectOptions.Builder.*

@@ -59,269 +63,260 @@ import java.io.UnsupportedEncodingException;
 * ifSourceModifiedSince(new DateTime().minusDays(1))
 * );
 * <code>
 *
 * @author Adrian Cole
 * @see <a
 *      href="http://docs.amazonwebservices.com/AmazonS3/2006-03-01/index.html?RESTObjectCOPY.html?"
 *      />
 */
public class CopyObjectOptions extends BaseHttpRequestOptions {
   private final static DateService dateService = new DateService();

   public static final CopyObjectOptions NONE = new CopyObjectOptions();

   private Multimap<String, String> metadata;

   private CannedAccessPolicy acl = CannedAccessPolicy.PRIVATE;

   /**
    * Override the default ACL (private) with the specified one.
    *
    * @see CannedAccessPolicy
    */
   public CopyObjectOptions overrideAcl(CannedAccessPolicy acl) {
      this.acl = checkNotNull(acl, "acl");
      if (!acl.equals(CannedAccessPolicy.PRIVATE))
         this.replaceHeader(S3Headers.CANNED_ACL, acl.toString());
      return this;
   }

   /**
    * @see CopyObjectOptions#overrideAcl(CannedAccessPolicy)
    */
   public CannedAccessPolicy getAcl() {
      return acl;
   }

   /**
    * For use in the header x-amz-copy-source-if-unmodified-since
    * <p/>
    * Copies the object if it hasn't been modified since the specified time; otherwise returns a 412
    * (precondition failed).
    * <p/>
    * This header can be used with x-amz-copy-source-if-match, but cannot be used with other
    * conditional copy headers.
    *
    * @return valid HTTP date
    * @see <a href="http://rfc.net/rfc2616.html?s3.3"/>
    * @see CopyObjectOptions#ifSourceModifiedSince(DateTime)
    */
   public String getIfModifiedSince() {
      return getFirstHeaderOrNull("x-amz-copy-source-if-modified-since");
   }

   /**
    * For use in the header x-amz-copy-source-if-modified-since
    * <p/>
    * Copies the object if it has been modified since the specified time; otherwise returns a 412
    * (failed condition).
    * <p/>
    * This header can be used with x-amz-copy-source-if-none-match, but cannot be used with other
    * conditional copy headers.
    *
    * @return valid HTTP date
    * @see <a href="http://rfc.net/rfc2616.html?s3.3"/>
    * @see CopyObjectOptions#ifSourceUnmodifiedSince(DateTime)
    */
   public String getIfUnmodifiedSince() {
      return getFirstHeaderOrNull("x-amz-copy-source-if-unmodified-since");
   }

   /**
    * For use in the request header: x-amz-copy-source-if-match
    * <p/>
    * Copies the object if its entity tag (ETag) matches the specified tag; otherwise return a 412
    * (precondition failed).
    * <p/>
    * This header can be used with x-amz-copy-source-if-unmodified-since, but cannot be used with
    * other conditional copy headers.
    *
    * @see CopyObjectOptions#ifSourceMd5Matches(byte[])
    */
   public String getIfMatch() {
      return getFirstHeaderOrNull("x-amz-copy-source-if-match");
   }

   /**
    * For use in the request header: x-amz-copy-source-if-none-match
    * <p/>
    * Copies the object if its entity tag (ETag) is different from the specified ETag; otherwise
    * returns a 412 (failed condition).
    * <p/>
    * This header can be used with x-amz-copy-source-if-modified-since, but cannot be used with
    * other conditional copy headers.
    *
    * @see CopyObjectOptions#ifSourceMd5DoesntMatch(byte[])
    */
   public String getIfNoneMatch() {
      return getFirstHeaderOrNull("x-amz-copy-source-if-none-match");
   }

   /**
    * When not null, contains the header [x-amz-copy-source-if-unmodified-since] -> [REPLACE] and
    * metadata headers passed in from the users.
    *
    * @see #overrideMetadataWith(Multimap)
    */
   public Multimap<String, String> getMetadata() {
      return metadata;
   }

   /**
    * Only return the object if it has changed since this time.
    * <p/>
    * Not compatible with {@link #ifSourceMd5Matches(byte[])} or
    * {@link #ifSourceUnmodifiedSince(DateTime)}
    */
   public CopyObjectOptions ifSourceModifiedSince(DateTime ifModifiedSince) {
      checkState(getIfMatch() == null, "ifMd5Matches() is not compatible with ifModifiedSince()");
      checkState(getIfUnmodifiedSince() == null,
               "ifUnmodifiedSince() is not compatible with ifModifiedSince()");
      replaceHeader("x-amz-copy-source-if-modified-since", dateService
               .rfc822DateFormat(checkNotNull(ifModifiedSince, "ifModifiedSince")));
      return this;
   }

   /**
    * Only return the object if it hasn't changed since this time.
    * <p/>
    * Not compatible with {@link #ifSourceMd5DoesntMatch(byte[])} or
    * {@link #ifSourceModifiedSince(DateTime)}
    */
   public CopyObjectOptions ifSourceUnmodifiedSince(DateTime ifUnmodifiedSince) {
      checkState(getIfNoneMatch() == null,
               "ifMd5DoesntMatch() is not compatible with ifUnmodifiedSince()");
      checkState(getIfModifiedSince() == null,
               "ifModifiedSince() is not compatible with ifUnmodifiedSince()");
      replaceHeader("x-amz-copy-source-if-unmodified-since", dateService
               .rfc822DateFormat(checkNotNull(ifUnmodifiedSince, "ifUnmodifiedSince")));
      return this;
   }

   /**
    * The object's md5 hash should match the parameter <code>md5</code>.
    * <p/>
    * Not compatible with {@link #ifSourceMd5DoesntMatch(byte[])} or
    * {@link #ifSourceModifiedSince(DateTime)}
    *
    * @param md5
    *           hash representing the entity
    * @throws UnsupportedEncodingException
    *            if there was a problem converting this into an S3 eTag string
    */
   public CopyObjectOptions ifSourceMd5Matches(byte[] md5) throws UnsupportedEncodingException {
      checkState(getIfNoneMatch() == null,
               "ifMd5DoesntMatch() is not compatible with ifMd5Matches()");
      checkState(getIfModifiedSince() == null,
               "ifModifiedSince() is not compatible with ifMd5Matches()");
      replaceHeader("x-amz-copy-source-if-match", String.format("\"%1$s\"", S3Utils
               .toHexString(checkNotNull(md5, "md5"))));
      return this;
   }

   /**
    * The object should not have a md5 hash corresponding with the parameter <code>md5</code>.
    * <p/>
    * Not compatible with {@link #ifSourceMd5Matches(byte[])} or
    * {@link #ifSourceUnmodifiedSince(DateTime)}
    *
    * @param md5
    *           hash representing the entity
    * @throws UnsupportedEncodingException
    *            if there was a problem converting this into an S3 eTag string
    */
   public CopyObjectOptions ifSourceMd5DoesntMatch(byte[] md5) throws UnsupportedEncodingException {
      checkState(getIfMatch() == null, "ifMd5Matches() is not compatible with ifMd5DoesntMatch()");
      Preconditions.checkState(getIfUnmodifiedSince() == null,
               "ifUnmodifiedSince() is not compatible with ifMd5DoesntMatch()");
      replaceHeader("x-amz-copy-source-if-none-match", String.format("\"%1$s\"", S3Utils
               .toHexString(checkNotNull(md5, "ifMd5DoesntMatch"))));
      return this;
   }

   @Override
   public Multimap<String, String> buildRequestHeaders() {
      Multimap<String, String> returnVal = HashMultimap.create();
      returnVal.putAll(headers);
      if (metadata != null) {
         returnVal.putAll(metadata);
         returnVal.put("x-amz-metadata-directive", "REPLACE");
      }
      return returnVal;
   }

   /**
    * Use the provided metadata instead of what is on the source object.
    */
   public CopyObjectOptions overrideMetadataWith(Multimap<String, String> metadata) {
      checkNotNull(metadata, "metadata");
      for (String header : metadata.keySet()) {
         checkArgument(header.startsWith("x-amz-meta-"),
                  "Metadata keys must start with x-amz-meta-");
      }
      this.metadata = metadata;
      return this;
   }

   public static class Builder {
      /**
       * @see CopyObjectOptions#overrideAcl(CannedAccessPolicy)
       */
      public static CopyObjectOptions overrideAcl(CannedAccessPolicy acl) {
         CopyObjectOptions options = new CopyObjectOptions();
         return options.overrideAcl(acl);
      }

      /**
       * @see CopyObjectOptions#getIfModifiedSince()
       */
      public static CopyObjectOptions ifSourceModifiedSince(DateTime ifModifiedSince) {
         CopyObjectOptions options = new CopyObjectOptions();
         return options.ifSourceModifiedSince(ifModifiedSince);
      }

      /**
       * @see CopyObjectOptions#ifSourceUnmodifiedSince(DateTime)
       */
      public static CopyObjectOptions ifSourceUnmodifiedSince(DateTime ifUnmodifiedSince) {
         CopyObjectOptions options = new CopyObjectOptions();
         return options.ifSourceUnmodifiedSince(ifUnmodifiedSince);
      }

      /**
       * @see CopyObjectOptions#ifSourceMd5Matches(byte[])
       */
      public static CopyObjectOptions ifSourceMd5Matches(byte[] md5)
               throws UnsupportedEncodingException {
         CopyObjectOptions options = new CopyObjectOptions();
         return options.ifSourceMd5Matches(md5);
      }

      /**
       * @see CopyObjectOptions#ifSourceMd5DoesntMatch(byte[])
       */
      public static CopyObjectOptions ifSourceMd5DoesntMatch(byte[] md5)
               throws UnsupportedEncodingException {
         CopyObjectOptions options = new CopyObjectOptions();
         return options.ifSourceMd5DoesntMatch(md5);
      }

      /**
       * @see #overrideMetadataWith(Multimap)
       */
      public static CopyObjectOptions overrideMetadataWith(Multimap<String, String> metadata) {
         CopyObjectOptions options = new CopyObjectOptions();
         return options.overrideMetadataWith(metadata);
      }
   }
}
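
A minimal usage sketch of the Builder pattern described in the Javadoc above, using only methods visible in this class:

import static org.jclouds.aws.s3.commands.options.CopyObjectOptions.Builder.ifSourceModifiedSince;

// build conditional-copy options and inspect the headers they will contribute
CopyObjectOptions options = ifSourceModifiedSince(new DateTime().minusDays(1));
Multimap<String, String> headers = options.buildRequestHeaders();
// headers now contains x-amz-copy-source-if-modified-since with an RFC 822 date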

@@ -23,14 +23,15 @@
 */
package org.jclouds.aws.s3.commands.options;

import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;

import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.List;

import org.jclouds.aws.s3.util.S3Utils;
import org.jclouds.aws.util.DateService;
import org.jclouds.http.HttpHeaders;
import org.jclouds.http.options.BaseHttpRequestOptions;
import org.joda.time.DateTime;

@@ -40,9 +41,9 @@ import com.google.common.collect.Multimap;

/**
 * Contains options supported in the REST API for the GET object operation. <h2>
 * Usage</h2> The recommended way to instantiate a GetObjectOptions object is to statically import
 * GetObjectOptions.Builder.* and invoke a static creation method followed by an instance mutator
 * (if needed):
 * <p/>
 * <code>
 * import static org.jclouds.aws.s3.commands.options.GetObjectOptions.Builder.*

@@ -53,254 +54,241 @@ import com.google.common.collect.Multimap;
 * Future<S3Object> object = connection.getObject("bucket","objectName",range(0,1024).ifUnmodifiedSince(new DateTime().minusDays(1)));
 * <code>
 *
 * @see <a
 *      href="http://docs.amazonwebservices.com/AmazonS3/2006-03-01/index.html?RESTObjectGET.html?"
 *      />
 * @author Adrian Cole
 *
 */
public class GetObjectOptions extends BaseHttpRequestOptions {
   private final static DateService dateService = new DateService();
   public static final GetObjectOptions NONE = new GetObjectOptions();
   private final List<String> ranges = new ArrayList<String>();

   @Override
   public Multimap<String, String> buildRequestHeaders() {
      Multimap<String, String> headers = super.buildRequestHeaders();
      String range = getRange();
      if (range != null)
         headers.put(HttpHeaders.RANGE, this.getRange());
      return headers;
   }

   /**
    * download the specified range of the object.
    */
   public GetObjectOptions range(long start, long end) {
      checkArgument(start >= 0, "start must be >= 0");
      checkArgument(end >= 0, "end must be >= 0");
      ranges.add(String.format("%d-%d", start, end));
      return this;
   }

   /**
    * download the object offset at <code>start</code>
    */
   public GetObjectOptions startAt(long start) {
      checkArgument(start >= 0, "start must be >= 0");
      ranges.add(String.format("%d-", start));
      return this;
   }

   /**
    * download the last <code>count</code> bytes of the object
    */
   public GetObjectOptions tail(long count) {
      checkArgument(count > 0, "count must be > 0");
      ranges.add(String.format("-%d", count));
      return this;
   }

   /**
    * For use in the header Range
    * <p />
    *
    * @see GetObjectOptions#range(long, long)
    */
   public String getRange() {
      return (ranges.size() > 0) ? String.format("bytes=%s", Joiner.on(",").join(ranges)) : null;
   }

   /**
    * Only return the object if it has changed since this time.
    * <p />
    * Not compatible with {@link #ifMd5Matches(byte[])} or {@link #ifUnmodifiedSince(DateTime)}
    */
   public GetObjectOptions ifModifiedSince(DateTime ifModifiedSince) {
      checkArgument(getIfMatch() == null, "ifMd5Matches() is not compatible with ifModifiedSince()");
      checkArgument(getIfUnmodifiedSince() == null,
               "ifUnmodifiedSince() is not compatible with ifModifiedSince()");
      this.headers.put(HttpHeaders.IF_MODIFIED_SINCE, dateService.rfc822DateFormat(checkNotNull(
               ifModifiedSince, "ifModifiedSince")));
      return this;
   }

   /**
    * For use in the header If-Modified-Since
    * <p />
    * Return the object only if it has been modified since the specified time, otherwise return a
    * 304 (not modified).
    *
    * @see GetObjectOptions#ifModifiedSince(DateTime)
    */
   public String getIfModifiedSince() {
      return this.getFirstHeaderOrNull(HttpHeaders.IF_MODIFIED_SINCE);
   }

   /**
    * Only return the object if it hasn't changed since this time.
    * <p />
    * Not compatible with {@link #ifMd5DoesntMatch(byte[])} or {@link #ifModifiedSince(DateTime)}
    */
   public GetObjectOptions ifUnmodifiedSince(DateTime ifUnmodifiedSince) {
      checkArgument(getIfNoneMatch() == null,
               "ifMd5DoesntMatch() is not compatible with ifUnmodifiedSince()");
      checkArgument(getIfModifiedSince() == null,
               "ifModifiedSince() is not compatible with ifUnmodifiedSince()");
      this.headers.put(HttpHeaders.IF_UNMODIFIED_SINCE, dateService.rfc822DateFormat(checkNotNull(
               ifUnmodifiedSince, "ifUnmodifiedSince")));
      return this;
   }

   /**
    * For use in the header If-Unmodified-Since
    * <p />
    * Return the object only if it has not been modified since the specified time, otherwise return
    * a 412 (precondition failed).
    *
    * @see GetObjectOptions#ifUnmodifiedSince(DateTime)
    */
   public String getIfUnmodifiedSince() {
      return this.getFirstHeaderOrNull(HttpHeaders.IF_UNMODIFIED_SINCE);
   }

   /**
    * The object's md5 hash should match the parameter <code>md5</code>.
    *
    * <p />
    * Not compatible with {@link #ifMd5DoesntMatch(byte[])} or {@link #ifModifiedSince(DateTime)}
    *
    * @param md5
    *           hash representing the entity
    * @throws UnsupportedEncodingException
    *            if there was a problem converting this into an S3 eTag string
    */
   public GetObjectOptions ifMd5Matches(byte[] md5) throws UnsupportedEncodingException {
      checkArgument(getIfNoneMatch() == null,
               "ifMd5DoesntMatch() is not compatible with ifMd5Matches()");
      checkArgument(getIfModifiedSince() == null,
               "ifModifiedSince() is not compatible with ifMd5Matches()");
      this.headers.put(HttpHeaders.IF_MATCH, String.format("\"%1$s\"", S3Utils
               .toHexString(checkNotNull(md5, "md5"))));
      return this;
   }

   /**
    * For use in the request header: If-Match
    * <p />
    * Return the object only if its entity tag (ETag) is the same as the md5 specified, otherwise
    * return a 412 (precondition failed).
    *
    * @see GetObjectOptions#ifMd5Matches(byte[])
    */
   public String getIfMatch() {
      return this.getFirstHeaderOrNull(HttpHeaders.IF_MATCH);
   }

   /**
    * The object should not have a md5 hash corresponding with the parameter <code>md5</code>.
    * <p />
    * Not compatible with {@link #ifMd5Matches(byte[])} or {@link #ifUnmodifiedSince(DateTime)}
    *
    * @param md5
    *           hash representing the entity
    * @throws UnsupportedEncodingException
    *            if there was a problem converting this into an S3 eTag string
    */
   public GetObjectOptions ifMd5DoesntMatch(byte[] md5) throws UnsupportedEncodingException {
      checkArgument(getIfMatch() == null,
               "ifMd5Matches() is not compatible with ifMd5DoesntMatch()");
      checkArgument(getIfUnmodifiedSince() == null,
               "ifUnmodifiedSince() is not compatible with ifMd5DoesntMatch()");
      this.headers.put(HttpHeaders.IF_NONE_MATCH, String.format("\"%1$s\"", S3Utils
               .toHexString(checkNotNull(md5, "ifMd5DoesntMatch"))));
      return this;
   }

   /**
    * For use in the request header: If-None-Match
    * <p />
    * Return the object only if its entity tag (ETag) is different from the one specified, otherwise
    * return a 304 (not modified).
    *
    * @see GetObjectOptions#ifMd5DoesntMatch(byte[])
    */
   public String getIfNoneMatch() {
      return this.getFirstHeaderOrNull(org.jclouds.http.HttpHeaders.IF_NONE_MATCH);
   }

   public static class Builder {

      /**
       * @see GetObjectOptions#range(long, long)
       */
      public static GetObjectOptions range(long start, long end) {
         GetObjectOptions options = new GetObjectOptions();
         return options.range(start, end);
      }

      /**
       * @see GetObjectOptions#startAt(long)
       */
      public static GetObjectOptions startAt(long start) {
         GetObjectOptions options = new GetObjectOptions();
         return options.startAt(start);
      }

      /**
       * @see GetObjectOptions#tail(long)
       */
      public static GetObjectOptions tail(long count) {
         GetObjectOptions options = new GetObjectOptions();
         return options.tail(count);
      }

      /**
       * @see GetObjectOptions#getIfModifiedSince()
       */
      public static GetObjectOptions ifModifiedSince(DateTime ifModifiedSince) {
         GetObjectOptions options = new GetObjectOptions();
         return options.ifModifiedSince(ifModifiedSince);
      }

      /**
       * @see GetObjectOptions#ifUnmodifiedSince(DateTime)
       */
      public static GetObjectOptions ifUnmodifiedSince(DateTime ifUnmodifiedSince) {
         GetObjectOptions options = new GetObjectOptions();
         return options.ifUnmodifiedSince(ifUnmodifiedSince);
      }

      /**
       * @see GetObjectOptions#ifMd5Matches(byte[])
       */
      public static GetObjectOptions ifMd5Matches(byte[] md5) throws UnsupportedEncodingException {
         GetObjectOptions options = new GetObjectOptions();
         return options.ifMd5Matches(md5);
      }

      /**
       * @see GetObjectOptions#ifMd5DoesntMatch(byte[])
       */
      public static GetObjectOptions ifMd5DoesntMatch(byte[] md5)
               throws UnsupportedEncodingException {
         GetObjectOptions options = new GetObjectOptions();
         return options.ifMd5DoesntMatch(md5);
      }

   }
}
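
A minimal usage sketch restating the Javadoc example above; the connection variable and its getObject overload are assumed for illustration:

import static org.jclouds.aws.s3.commands.options.GetObjectOptions.Builder.range;

// fetch only the first KB, and only if the object is unchanged since yesterday
Future<S3Object> object = connection.getObject("bucket", "objectName",
         range(0, 1024).ifUnmodifiedSince(new DateTime().minusDays(1)));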

@@ -59,7 +59,7 @@ public class ListBucketOptions extends BaseHttpRequestOptions {
    */
   public ListBucketOptions withPrefix(String prefix)
            throws UnsupportedEncodingException {
      parameters.put("prefix", URLEncoder.encode(checkNotNull(prefix, "prefix"),
               "UTF-8"));
      return this;
   }

@@ -68,7 +68,7 @@ public class ListBucketOptions extends BaseHttpRequestOptions {
    * @see ListBucketOptions#withPrefix(String)
    */
   public String getPrefix() {
      return parameters.get("prefix");
   }

   /**

@@ -81,7 +81,7 @@ public class ListBucketOptions extends BaseHttpRequestOptions {
    */
   public ListBucketOptions afterMarker(String marker)
            throws UnsupportedEncodingException {
      parameters.put("marker", URLEncoder.encode(checkNotNull(marker, "marker"),
               "UTF-8"));
      return this;
   }

@@ -90,7 +90,7 @@ public class ListBucketOptions extends BaseHttpRequestOptions {
    * @see ListBucketOptions#afterMarker(String)
    */
   public String getMarker() {
      return parameters.get("marker");
   }

   /**

@@ -99,7 +99,7 @@ public class ListBucketOptions extends BaseHttpRequestOptions {
    */
   public ListBucketOptions maxResults(long maxKeys) {
      checkState(maxKeys >= 0, "maxKeys must be >= 0");
      parameters.put("max-keys", Long.toString(maxKeys));
      return this;
   }

@@ -107,7 +107,7 @@ public class ListBucketOptions extends BaseHttpRequestOptions {
    * @see ListBucketOptions#maxResults(long)
    */
   public String getMaxKeys() {
      return parameters.get("max-keys");
   }

   /**

@@ -120,7 +120,7 @@ public class ListBucketOptions extends BaseHttpRequestOptions {
    */
   public ListBucketOptions delimiter(String delimiter)
            throws UnsupportedEncodingException {
      parameters.put("delimiter", URLEncoder.encode(checkNotNull(delimiter,
               "delimiter"), "UTF-8"));
      return this;
   }

@@ -129,7 +129,7 @@ public class ListBucketOptions extends BaseHttpRequestOptions {
    * @see ListBucketOptions#delimiter(String)
    */
   public String getDelimiter() {
      return parameters.get("delimiter");
   }

   public static class Builder {
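
A short sketch of how these parameters combine; the no-argument constructor is assumed here, and the prefix and delimiter calls can throw UnsupportedEncodingException because their values are URL-encoded:

// list at most 100 keys under the "photos/" prefix, grouped by "/"
ListBucketOptions listing = new ListBucketOptions()
         .withPrefix("photos/").maxResults(100).delimiter("/");
// the renamed parameters map presumably ends up in the request query string,
// e.g. prefix=photos%2F&max-keys=100&delimiter=%2F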
@ -23,18 +23,15 @@
|
|||
*/
|
||||
package org.jclouds.aws.s3.config;
|
||||
|
||||
import com.google.inject.*;
|
||||
import com.google.inject.assistedinject.FactoryProvider;
|
||||
import com.google.inject.name.Named;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import javax.annotation.Resource;
|
||||
|
||||
import org.jclouds.aws.s3.S3Connection;
|
||||
import org.jclouds.aws.s3.S3Context;
|
||||
import org.jclouds.aws.s3.commands.config.S3CommandsModule;
|
||||
import org.jclouds.aws.s3.filters.RequestAuthorizeSignature;
|
||||
import org.jclouds.aws.s3.handlers.ParseS3ErrorFromXmlContent;
|
||||
import org.jclouds.aws.s3.internal.GuiceS3Context;
|
||||
import org.jclouds.aws.s3.internal.LiveS3Connection;
|
||||
import org.jclouds.aws.s3.internal.LiveS3InputStreamMap;
|
||||
import org.jclouds.aws.s3.internal.LiveS3ObjectMap;
|
||||
import org.jclouds.http.HttpConstants;
|
||||
import org.jclouds.http.HttpRequestFilter;
import org.jclouds.http.HttpResponseHandler;

@@ -44,53 +41,52 @@ import org.jclouds.http.annotation.ServerErrorHandler;
import org.jclouds.http.handlers.CloseContentAndSetExceptionHandler;
import org.jclouds.logging.Logger;

import javax.annotation.Resource;
import java.util.ArrayList;
import java.util.List;

import com.google.inject.AbstractModule;
import com.google.inject.Inject;
import com.google.inject.Provides;
import com.google.inject.Scopes;
import com.google.inject.Singleton;
import com.google.inject.name.Named;

/**
 * Configures the S3 connection, including logging and http transport.
 * 
 * @author Adrian Cole
 */
@S3ConnectionModule
public class LiveS3ConnectionModule extends AbstractModule {
   @Resource
   protected Logger logger = Logger.NULL;

   @Inject
   @Named(HttpConstants.PROPERTY_HTTP_ADDRESS)
   String address;
   @Inject
   @Named(HttpConstants.PROPERTY_HTTP_PORT)
   int port;
   @Inject
   @Named(HttpConstants.PROPERTY_HTTP_SECURE)
   boolean isSecure;

   @Override
   protected void configure() {
      bind(S3Connection.class).to(LiveS3Connection.class).in(Scopes.SINGLETON);
      bind(HttpResponseHandler.class).annotatedWith(RedirectHandler.class).to(
               CloseContentAndSetExceptionHandler.class).in(Scopes.SINGLETON);
      bind(HttpResponseHandler.class).annotatedWith(ClientErrorHandler.class).to(
               ParseS3ErrorFromXmlContent.class).in(Scopes.SINGLETON);
      bind(HttpResponseHandler.class).annotatedWith(ServerErrorHandler.class).to(
               ParseS3ErrorFromXmlContent.class).in(Scopes.SINGLETON);
      requestInjection(this);
      logger.info("S3 Context = %1$s://%2$s:%3$s", (isSecure ? "https" : "http"), address, port);
   }

   @Provides
   @Singleton
   List<HttpRequestFilter> provideRequestFilters(RequestAuthorizeSignature requestAuthorizeSignature) {
      List<HttpRequestFilter> filters = new ArrayList<HttpRequestFilter>();
      filters.add(requestAuthorizeSignature);
      return filters;
   }

}
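For orientation, the sketch below shows one way a module like the one above could be wired into a Guice injector. It is not part of this commit: the property keys stand in for the HttpConstants.PROPERTY_HTTP_* constants, and the companion modules that bind the http client and S3 commands are only referenced in a comment.

import java.util.Properties;

import com.google.inject.AbstractModule;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.name.Names;

public class S3GuiceWiringSketch {
   public static void main(String[] args) {
      // Constant bindings for the @Named values LiveS3ConnectionModule injects.
      // The literal keys are assumptions standing in for HttpConstants.PROPERTY_HTTP_*.
      final Properties props = new Properties();
      props.setProperty("jclouds.http.address", "s3.amazonaws.com");
      props.setProperty("jclouds.http.port", "443");
      props.setProperty("jclouds.http.secure", "true");

      Injector injector = Guice.createInjector(new AbstractModule() {
         @Override
         protected void configure() {
            Names.bindProperties(binder(), props); // satisfies the @Named(...) fields
         }
      }, new LiveS3ConnectionModule() /* the http client and S3 command modules must also be installed */);

      // With the full module set in place, the binding made in configure() resolves here.
      S3Connection connection = injector.getInstance(S3Connection.class);
      System.out.println("bound connection: " + connection);
   }
}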
@@ -6,13 +6,11 @@ import static java.lang.annotation.RetentionPolicy.RUNTIME;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;

import org.jclouds.http.HttpFutureCommandClient;

/**
 * designates that the module configures a {@link org.jclouds.aws.s3.S3Connection}
 * 
 * @author Adrian Cole
 */
@Retention(RUNTIME)
@Target(TYPE)
@@ -23,8 +23,6 @@
 */
package org.jclouds.aws.s3.config;

import org.jclouds.aws.s3.S3Connection;
import org.jclouds.aws.s3.S3Context;
import org.jclouds.aws.s3.commands.config.S3CommandsModule;
@@ -32,28 +30,28 @@ import org.jclouds.aws.s3.internal.GuiceS3Context;
import org.jclouds.aws.s3.internal.LiveS3InputStreamMap;
import org.jclouds.aws.s3.internal.LiveS3ObjectMap;

import com.google.inject.AbstractModule;
import com.google.inject.assistedinject.FactoryProvider;

/**
 * Configures the {@link S3Context}; requires {@link S3Connection} bound.
 * 
 * @author Adrian Cole
 */
public class S3ContextModule extends AbstractModule {

   @Override
   protected void configure() {
      this.requireBinding(S3Connection.class);
      install(new S3CommandsModule());
      bind(GuiceS3Context.S3ObjectMapFactory.class).toProvider(
               FactoryProvider.newFactory(GuiceS3Context.S3ObjectMapFactory.class,
                        LiveS3ObjectMap.class));
      bind(GuiceS3Context.S3InputStreamMapFactory.class).toProvider(
               FactoryProvider.newFactory(GuiceS3Context.S3InputStreamMapFactory.class,
                        LiveS3InputStreamMap.class));
      bind(S3Context.class).to(GuiceS3Context.class);

   }

}
@@ -30,8 +30,8 @@ import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;

import org.jclouds.aws.s3.reference.S3Constants;
import org.jclouds.aws.s3.util.S3Utils;
import org.jclouds.aws.util.DateService;
import org.jclouds.http.HttpException;
import org.jclouds.http.HttpHeaders;
import org.jclouds.http.HttpRequest;

@@ -43,150 +43,136 @@ import com.google.inject.name.Named;
/**
 * Signs the S3 request. This will update timestamps at most once per second.
 * 
 * @see <a href= "http://docs.amazonwebservices.com/AmazonS3/latest/RESTAuthentication.html" />
 * @author Adrian Cole
 * 
 */
public class RequestAuthorizeSignature implements HttpRequestFilter {
   private static final String[] firstHeadersToSign = new String[] { HttpHeaders.CONTENT_MD5,
            HttpHeaders.CONTENT_TYPE, HttpHeaders.DATE };

   private final String accessKey;
   private final String secretKey;
   private final DateService dateService;

   public static final long BILLION = 1000000000;
   private final AtomicReference<String> timeStamp;
   private final AtomicLong trigger = new AtomicLong(System.nanoTime() + 1 * BILLION);

   /**
    * Start the time update service. Amazon clocks need to be within 900 seconds of the request
    * time. This method updates the clock every second. This is not performed per-request, as
    * creation of the date object is a slow, synchronized command.
    */
   synchronized void updateIfTimeOut() {

      if (trigger.get() - System.nanoTime() <= 0) {
         timeStamp.set(createNewStamp());
         trigger.set(System.nanoTime() + 1 * BILLION);
      }

   }

   // this is a hotspot when submitted concurrently, so be lazy.
   // amazon is ok with up to 15 minutes off their time, so let's
   // be as lazy as possible.
   String createNewStamp() {
      return dateService.rfc822DateFormat();
   }

   public String timestampAsHeaderString() {
      updateIfTimeOut();
      return timeStamp.get();
   }

   @Inject
   public RequestAuthorizeSignature(@Named(S3Constants.PROPERTY_AWS_ACCESSKEYID) String accessKey,
            @Named(S3Constants.PROPERTY_AWS_SECRETACCESSKEY) String secretKey,
            DateService dateService) {
      this.accessKey = accessKey;
      this.secretKey = secretKey;
      this.dateService = dateService;
      timeStamp = new AtomicReference<String>(createNewStamp());
   }

   public void filter(HttpRequest request) throws HttpException {
      // re-sign the request
      removeOldHeaders(request);

      addDateHeader(request);

      String toSign = createStringToSign(request);

      addAuthHeader(request, toSign);
   }

   public static String createStringToSign(HttpRequest request) {
      StringBuilder buffer = new StringBuilder();
      appendMethod(request, buffer);
      appendHttpHeaders(request, buffer);
      appendAmzHeaders(request, buffer);
      appendBucketName(request, buffer);
      appendUriPath(request, buffer);
      return buffer.toString();
   }

   private void removeOldHeaders(HttpRequest request) {
      request.getHeaders().removeAll(S3Constants.AUTHORIZATION);
      request.getHeaders().removeAll(HttpHeaders.CONTENT_TYPE);
      request.getHeaders().removeAll(HttpHeaders.DATE);
   }

   private void addAuthHeader(HttpRequest request, String toSign) throws HttpException {
      String signature;
      try {
         signature = S3Utils.hmacSha1Base64(toSign, secretKey.getBytes());
      } catch (Exception e) {
         throw new HttpException("error signing request", e);
      }
      request.getHeaders().put(S3Constants.AUTHORIZATION, "AWS " + accessKey + ":" + signature);
   }

   private static void appendMethod(HttpRequest request, StringBuilder toSign) {
      toSign.append(request.getMethod()).append("\n");
   }

   private void addDateHeader(HttpRequest request) {
      request.getHeaders().put(HttpHeaders.DATE, timestampAsHeaderString());
   }

   private static void appendAmzHeaders(HttpRequest request, StringBuilder toSign) {
      Set<String> headers = new TreeSet<String>(request.getHeaders().keySet());
      for (String header : headers) {
         if (header.startsWith("x-amz-")) {
            toSign.append(header).append(":");
            for (String value : request.getHeaders().get(header))
               toSign.append(value.replaceAll("\r?\n", "")).append(",");
            toSign.deleteCharAt(toSign.lastIndexOf(","));
            toSign.append("\n");
         }
      }
   }

   private static void appendHttpHeaders(HttpRequest request, StringBuilder toSign) {
      for (String header : firstHeadersToSign)
         toSign.append(valueOrEmpty(request.getHeaders().get(header))).append("\n");
   }

   private static void appendBucketName(HttpRequest request, StringBuilder toSign) {
      String hostHeader = request.getHeaders().get(HttpHeaders.HOST).iterator().next();
      if (hostHeader.endsWith(".s3.amazonaws.com"))
         toSign.append("/").append(hostHeader.substring(0, hostHeader.length() - 17));
   }

   private static void appendUriPath(HttpRequest request, StringBuilder toSign) {
      int queryIndex = request.getUri().indexOf('?');
      if (queryIndex >= 0)
         toSign.append(request.getUri().substring(0, queryIndex));
      else
         toSign.append(request.getUri());
   }

   private static String valueOrEmpty(Collection<String> collection) {
      return (collection != null && collection.size() >= 1) ? collection.iterator().next() : "";
   }
}
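The filter above assembles a canonical string and signs it with HMAC-SHA1. The standalone sketch below reproduces that single step with plain JDK crypto instead of the Bouncy Castle helper in S3Utils; the string to sign and the credentials are placeholders, not values from this codebase.

import java.nio.charset.StandardCharsets;
import java.util.Base64;

import javax.crypto.Mac;
import javax.crypto.spec.SecretKeySpec;

public class S3SignatureSketch {
   public static void main(String[] args) throws Exception {
      // Canonical layout: METHOD \n Content-MD5 \n Content-Type \n Date \n [x-amz-*] [resource]
      String toSign = "GET\n\n\nThu, 17 Nov 2005 18:49:58 GMT\n/quotes/nelson";
      String secretKey = "uV3F3YluFJax1cknvbcGwgjvx4QpvB+leU8dUj2o"; // placeholder credential
      String accessKey = "AKIAIOSFODNN7EXAMPLE"; // placeholder credential

      // HMAC-SHA1 over the canonical string, keyed by the secret key,
      // then Base64 encoded: the same operation S3Utils.hmacSha1Base64 performs.
      Mac hmac = Mac.getInstance("HmacSHA1");
      hmac.init(new SecretKeySpec(secretKey.getBytes(StandardCharsets.UTF_8), "HmacSHA1"));
      byte[] raw = hmac.doFinal(toSign.getBytes(StandardCharsets.UTF_8));
      String signature = Base64.getEncoder().encodeToString(raw);

      System.out.println("Authorization: AWS " + accessKey + ":" + signature);
   }
}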
@@ -23,8 +23,10 @@
 */
package org.jclouds.aws.s3.internal;

import java.io.IOException;

import javax.annotation.Resource;

import org.jclouds.aws.s3.S3Connection;
import org.jclouds.aws.s3.S3Context;
import org.jclouds.aws.s3.S3InputStreamMap;
@@ -32,75 +34,74 @@ import org.jclouds.aws.s3.S3ObjectMap;
import org.jclouds.lifecycle.Closer;
import org.jclouds.logging.Logger;

import com.google.inject.Inject;
import com.google.inject.Injector;

/**
 * Uses a Guice Injector to configure the objects served by S3Context methods.
 * 
 * @author Adrian Cole
 * @see Injector
 */
public class GuiceS3Context implements S3Context {
   public interface S3ObjectMapFactory {
      S3ObjectMap createMapView(String bucket);
   }

   public interface S3InputStreamMapFactory {
      S3InputStreamMap createMapView(String bucket);
   }

   @Resource
   private Logger logger = Logger.NULL;
   private final Injector injector;
   private final S3InputStreamMapFactory s3InputStreamMapFactory;
   private final S3ObjectMapFactory s3ObjectMapFactory;
   private final Closer closer;

   @Inject
   private GuiceS3Context(Injector injector, Closer closer, S3ObjectMapFactory s3ObjectMapFactory,
            S3InputStreamMapFactory s3InputStreamMapFactory) {
      this.injector = injector;
      this.s3InputStreamMapFactory = s3InputStreamMapFactory;
      this.s3ObjectMapFactory = s3ObjectMapFactory;
      this.closer = closer;
   }

   /**
    * {@inheritDoc}
    */
   public S3Connection getConnection() {
      return injector.getInstance(S3Connection.class);
   }

   /**
    * {@inheritDoc}
    */
   public S3InputStreamMap createInputStreamMap(String bucket) {
      getConnection().putBucketIfNotExists(bucket);
      return s3InputStreamMapFactory.createMapView(bucket);
   }

   /**
    * {@inheritDoc}
    */
   public S3ObjectMap createS3ObjectMap(String bucket) {
      getConnection().putBucketIfNotExists(bucket);
      return s3ObjectMapFactory.createMapView(bucket);
   }

   /**
    * {@inheritDoc}
    * 
    * @see Closer
    */
   public void close() {
      try {
         closer.close();
      } catch (IOException e) {
         logger.error(e, "error closing content");
      }
   }

}
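A rough usage sketch for the context above follows. It assumes the caller already obtained an S3Context from an injector configured with S3ContextModule and a bound S3Connection, and it assumes S3InputStreamMap behaves like a java.util.Map keyed by object name with InputStream values; both are assumptions for illustration only.

import java.io.IOException;
import java.io.InputStream;

import org.jclouds.aws.s3.S3Context;
import org.jclouds.aws.s3.S3InputStreamMap;

public class S3ContextUsageSketch {
   // Bucket and key names are placeholders.
   public static void readOneValue(S3Context context, String bucket, String key) throws IOException {
      try {
         // createInputStreamMap creates the bucket on demand before exposing the map view
         S3InputStreamMap map = context.createInputStreamMap(bucket);
         InputStream value = map.get(key); // assumes Map-style access to object data
         if (value != null)
            value.close();
      } finally {
         context.close(); // delegates to Closer; IOExceptions are logged, not rethrown
      }
   }
}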
@@ -53,191 +53,180 @@ import com.google.inject.Inject;
/**
 * Uses {@link HttpFutureCommandClient} to invoke the REST API of S3.
 * 
 * @see <a href="http://docs.amazonwebservices.com/AmazonS3/2006-03-01/index.html?" />
 * @author Adrian Cole
 */
public class LiveS3Connection implements S3Connection {

   private final HttpFutureCommandClient client;
   /**
    * creates command objects that can be submitted to the client
    */
   private final S3CommandFactory factory;

   @Inject
   public LiveS3Connection(HttpFutureCommandClient client, S3CommandFactory factory) {
      this.client = client;
      this.factory = factory;
   }

   /**
    * {@inheritDoc}
    * 
    * @see GetObject
    */
   public Future<S3Object> getObject(String s3Bucket, String key) {
      return getObject(s3Bucket, key, GetObjectOptions.NONE);
   }

   /**
    * {@inheritDoc}
    * 
    * @see GetObject
    */
   public Future<S3Object> getObject(String s3Bucket, String key, GetObjectOptions options) {
      GetObject getObject = factory.createGetObject(s3Bucket, key, options);
      client.submit(getObject);
      return getObject;
   }

   /**
    * {@inheritDoc}
    * 
    * @see HeadObject
    */
   public Future<S3Object.Metadata> headObject(String s3Bucket, String key) {
      HeadObject headMetadata = factory.createHeadMetadata(s3Bucket, key);
      client.submit(headMetadata);
      return headMetadata;
   }

   /**
    * {@inheritDoc}
    * 
    * @see DeleteObject
    */
   public Future<Boolean> deleteObject(String s3Bucket, String key) {
      DeleteObject deleteObject = factory.createDeleteObject(s3Bucket, key);
      client.submit(deleteObject);
      return deleteObject;
   }

   /**
    * {@inheritDoc}
    * 
    * @see PutObject
    */
   public Future<byte[]> putObject(String s3Bucket, S3Object object) {
      return putObject(s3Bucket, object, PutObjectOptions.NONE);
   }

   /**
    * {@inheritDoc}
    * 
    * @see PutObject
    */
   public Future<byte[]> putObject(String bucketName, S3Object object, PutObjectOptions options) {
      PutObject putObject = factory.createPutObject(bucketName, object, options);
      client.submit(putObject);
      return putObject;
   }

   /**
    * {@inheritDoc}
    * 
    * @see PutBucket
    */
   public Future<Boolean> putBucketIfNotExists(String s3Bucket) {
      return putBucketIfNotExists(s3Bucket, PutBucketOptions.NONE);
   }

   /**
    * {@inheritDoc}
    * 
    * @see PutBucket
    */
   public Future<Boolean> putBucketIfNotExists(String s3Bucket, PutBucketOptions options) {
      PutBucket putBucket = factory.createPutBucket(s3Bucket, options);
      client.submit(putBucket);
      return putBucket;
   }

   /**
    * {@inheritDoc}
    * 
    * @see DeleteBucket
    */
   public Future<Boolean> deleteBucketIfEmpty(String s3Bucket) {
      DeleteBucket deleteBucket = factory.createDeleteBucket(s3Bucket);
      client.submit(deleteBucket);
      return deleteBucket;
   }

   /**
    * {@inheritDoc}
    * 
    * @see CopyObject
    */
   public Future<S3Object.Metadata> copyObject(String sourceBucket, String sourceObject,
            String destinationBucket, String destinationObject) {
      return copyObject(sourceBucket, sourceObject, destinationBucket, destinationObject,
               new CopyObjectOptions());
   }

   /**
    * {@inheritDoc}
    * 
    * @see CopyObject
    */
   public Future<S3Object.Metadata> copyObject(String sourceBucket, String sourceObject,
            String destinationBucket, String destinationObject, CopyObjectOptions options) {
      CopyObject copy = factory.createCopyObject(sourceBucket, sourceObject, destinationBucket,
               destinationObject, options);
      client.submit(copy);
      return copy;
   }

   /**
    * {@inheritDoc}
    * 
    * @see BucketExists
    */
   public Future<Boolean> bucketExists(String s3Bucket) {
      BucketExists headRequestObject = factory.createHeadBucket(s3Bucket);
      client.submit(headRequestObject);
      return headRequestObject;
   }

   /**
    * {@inheritDoc}
    * 
    * @see ListBucket
    */
   public Future<S3Bucket> listBucket(String s3Bucket) {
      return listBucket(s3Bucket, ListBucketOptions.NONE);
   }

   /**
    * {@inheritDoc}
    * 
    * @see ListBucket
    */
   public Future<S3Bucket> listBucket(String s3Bucket, ListBucketOptions options) {
      ListBucket getBucket = factory.createListBucket(s3Bucket, options);
      client.submit(getBucket);
      return getBucket;
   }

   /**
    * {@inheritDoc}
    * 
    * @see ListOwnedBuckets
    */
   public Future<List<Metadata>> listOwnedBuckets() {
      ListOwnedBuckets listRequest = factory.createGetMetadataForOwnedBuckets();
      client.submit(listRequest);
      return listRequest;
   }

}
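Every method on the connection above returns a Future, so callers can either block or compose the results elsewhere. The sketch below shows the blocking style; the bucket name and the printed summary are illustrative assumptions, not part of this commit.

import java.util.concurrent.ExecutionException;

import org.jclouds.aws.s3.S3Connection;
import org.jclouds.aws.s3.domain.S3Bucket;

public class S3ConnectionUsageSketch {
   // Bucket name is a placeholder; get() blocks until the submitted command completes.
   public static void listAndPrint(S3Connection connection) throws InterruptedException,
            ExecutionException {
      if (connection.putBucketIfNotExists("adriansbucket").get()) {
         S3Bucket bucket = connection.listBucket("adriansbucket").get();
         // contents are the object metadata entries parsed by ListBucketHandler
         System.out.println("objects in bucket: " + bucket.getContents().size());
      }
   }
}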
@@ -28,218 +28,115 @@ import static com.google.common.base.Preconditions.checkNotNull;
import org.apache.commons.io.IOUtils;
import org.apache.commons.io.output.ByteArrayOutputStream;
import org.bouncycastle.crypto.digests.MD5Digest;
import org.bouncycastle.crypto.digests.SHA1Digest;
import org.bouncycastle.crypto.macs.HMac;
import org.bouncycastle.crypto.params.KeyParameter;
import org.bouncycastle.util.encoders.Base64;
import org.jclouds.aws.s3.domain.S3Object;
import org.jclouds.aws.util.AWSUtils;

import java.io.*;
import java.security.InvalidKeyException;
import java.security.NoSuchAlgorithmException;
import java.security.NoSuchProviderException;
import java.util.regex.Pattern;

/**
 * Encryption, Hashing, and IO Utilities needed to sign and verify S3 requests and responses.
 * 
 * @author Adrian Cole
 */
public class S3Utils extends AWSUtils {

   private static final Pattern IP_PATTERN = Pattern
            .compile("b(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?).)"
                     + "{3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)b");

   public static String validateBucketName(String bucketName) {
      checkNotNull(bucketName, "bucketName");
      checkArgument(bucketName.matches("^[a-z0-9].*"),
               "bucketName name must start with a number or letter");
      checkArgument(
               bucketName.matches("^[-_.a-z0-9]+"),
               "bucketName name can only contain lowercase letters, numbers, periods (.), underscores (_), and dashes (-)");
      checkArgument(bucketName.length() > 2 && bucketName.length() < 256,
               "bucketName name must be between 3 and 255 characters long");
      checkArgument(!IP_PATTERN.matcher(bucketName).matches(),
               "bucketName name cannot be ip address style");
      return bucketName;
   }

   public static long calculateSize(Object data) {
      long size = -1;
      if (data instanceof byte[]) {
         size = ((byte[]) data).length;
      } else if (data instanceof String) {
         size = ((String) data).length();
      } else if (data instanceof File) {
         size = ((File) data).length();
      }
      return size;
   }

   static final byte[] HEX_CHAR_TABLE = { (byte) '0', (byte) '1', (byte) '2', (byte) '3',
            (byte) '4', (byte) '5', (byte) '6', (byte) '7', (byte) '8', (byte) '9', (byte) 'a',
            (byte) 'b', (byte) 'c', (byte) 'd', (byte) 'e', (byte) 'f' };

   /**
    * @throws IOException
    */
   public static byte[] md5(Object data) throws IOException {
      checkNotNull(data, "data must be set before calling generateMd5()");
      byte[] md5 = null;
      if (data == null) {
      } else if (data instanceof byte[]) {
         md5 = S3Utils.md5((byte[]) data);
      } else if (data instanceof String) {
         md5 = S3Utils.md5(((String) data).getBytes());
      } else if (data instanceof File) {
         md5 = S3Utils.md5(((File) data));
      } else {
         throw new UnsupportedOperationException("Content not supported " + data.getClass());
      }
      return md5;

   }

   public static String toHexString(byte[] raw) throws UnsupportedEncodingException {
      byte[] hex = new byte[2 * raw.length];
      int index = 0;

      for (byte b : raw) {
         int v = b & 0xFF;
         hex[index++] = HEX_CHAR_TABLE[v >>> 4];
         hex[index++] = HEX_CHAR_TABLE[v & 0xF];
      }
      return new String(hex, "ASCII");
   }

   public static byte[] fromHexString(String hex) {
      byte[] bytes = new byte[hex.length() / 2];
      for (int i = 0; i < bytes.length; i++) {
         bytes[i] = (byte) Integer.parseInt(hex.substring(2 * i, 2 * i + 2), 16);
      }
      return bytes;
   }

   public static String hmacSha1Base64(String toEncode, byte[] key) throws NoSuchAlgorithmException,
            NoSuchProviderException, InvalidKeyException {
      HMac hmac = new HMac(new SHA1Digest());
      byte[] resBuf = new byte[hmac.getMacSize()];
      byte[] plainBytes = toEncode.getBytes();
      byte[] keyBytes = key;
      hmac.init(new KeyParameter(keyBytes));
      hmac.update(plainBytes, 0, plainBytes.length);
      hmac.doFinal(resBuf, 0);
      return toBase64String(resBuf);
   }

   public static String md5Hex(byte[] toEncode) throws NoSuchAlgorithmException,
            NoSuchProviderException, InvalidKeyException, UnsupportedEncodingException {
      byte[] resBuf = md5(toEncode);
      return toHexString(resBuf);
   }

   public static String md5Base64(byte[] toEncode) throws NoSuchAlgorithmException,
            NoSuchProviderException, InvalidKeyException {
      byte[] resBuf = md5(toEncode);
      return toBase64String(resBuf);
   }

   public static String toBase64String(byte[] resBuf) {
      return new String(Base64.encode(resBuf));
   }

   public static byte[] md5(byte[] plainBytes) {
      MD5Digest md5 = new MD5Digest();
      byte[] resBuf = new byte[md5.getDigestSize()];
      md5.update(plainBytes, 0, plainBytes.length);
      md5.doFinal(resBuf, 0);
      return resBuf;
   }

   public static byte[] md5(File toEncode) throws IOException {
      MD5Digest md5 = new MD5Digest();
      byte[] resBuf = new byte[md5.getDigestSize()];
      byte[] buffer = new byte[1024];
      int numRead = -1;
      InputStream i = new FileInputStream(toEncode);
      try {
         do {
            numRead = i.read(buffer);
            if (numRead > 0) {
               md5.update(buffer, 0, numRead);
            }
         } while (numRead != -1);
      } finally {
         IOUtils.closeQuietly(i);
      }
      md5.doFinal(resBuf, 0);
      return resBuf;
   }

   public static Md5InputStreamResult generateMd5Result(InputStream toEncode) throws IOException {
      MD5Digest md5 = new MD5Digest();
      byte[] resBuf = new byte[md5.getDigestSize()];
      byte[] buffer = new byte[1024];
      ByteArrayOutputStream out = new ByteArrayOutputStream();
      long length = 0;
      int numRead = -1;
      try {
         do {
            numRead = toEncode.read(buffer);
            if (numRead > 0) {
               length += numRead;
               md5.update(buffer, 0, numRead);
               out.write(buffer, 0, numRead);
            }
         } while (numRead != -1);
      } finally {
         out.close();
         IOUtils.closeQuietly(toEncode);
      }
      md5.doFinal(resBuf, 0);
      return new Md5InputStreamResult(out.toByteArray(), resBuf, length);
   }

   public static class Md5InputStreamResult {
      public final byte[] data;
      public final byte[] md5;
      public final long length;

      Md5InputStreamResult(byte[] data, byte[] md5, long length) {
         this.data = checkNotNull(data, "data");
         this.md5 = checkNotNull(md5, "md5");
         checkArgument(length >= 0, "length cannot be negative");
         this.length = length;
      }

   }

   public static String getContentAsStringAndClose(S3Object object) throws IOException {
      checkNotNull(object, "s3Object");
      checkNotNull(object.getData(), "s3Object.content");
      Object o = object.getData();

      if (o instanceof InputStream) {
         String returnVal = toStringAndClose((InputStream) o);
         if (object.getMetadata().getContentType().indexOf("xml") >= 0) {

         }
         return returnVal;
      } else {
         throw new IllegalArgumentException("Object type not supported: " + o.getClass().getName());
      }
   }
}
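The hex and Base64 helpers above feed ETag comparisons and the Content-MD5 header. The JDK-only sketch below shows the two encodings of the same 16-byte MD5 digest; it mirrors, but does not reuse, the Bouncy Castle based methods in S3Utils, and the input string is just an example.

import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.Base64;

public class ContentMd5Sketch {
   private static final char[] HEX = "0123456789abcdef".toCharArray();

   public static void main(String[] args) throws NoSuchAlgorithmException {
      byte[] digest = MessageDigest.getInstance("MD5").digest("hello world".getBytes());

      // Same nibble lookup idea as HEX_CHAR_TABLE in S3Utils.toHexString
      StringBuilder hex = new StringBuilder();
      for (byte b : digest) {
         hex.append(HEX[(b >>> 4) & 0xF]).append(HEX[b & 0xF]);
      }
      System.out.println("ETag-style hex:     " + hex);
      System.out.println("Content-MD5 header: " + Base64.getEncoder().encodeToString(digest));
   }
}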
@@ -24,8 +24,8 @@
package org.jclouds.aws.s3.xml;

import org.jclouds.aws.s3.domain.S3Object;
import org.jclouds.aws.s3.util.S3Utils;
import org.jclouds.aws.util.DateService;
import org.jclouds.http.commands.callables.xml.ParseSax;

import com.google.inject.Inject;

@@ -35,39 +35,34 @@ import com.google.inject.Inject;
 * <p/>
 * CopyObjectResult is the document we expect to parse.
 * 
 * @see <a href= "http://docs.amazonwebservices.com/AmazonS3/2006-03-01/RESTObjectCOPY.html" />
 * @author Adrian Cole
 */
public class CopyObjectHandler extends ParseSax.HandlerWithResult<S3Object.Metadata> {

   private S3Object.Metadata metadata;
   private StringBuilder currentText = new StringBuilder();
   @Inject
   private DateService dateParser;

   public void setKey(String key) {
      metadata = new S3Object.Metadata(key);
   }

   public S3Object.Metadata getResult() {
      return metadata;
   }

   public void endElement(String uri, String name, String qName) {
      if (qName.equals("ETag")) {
         metadata.setMd5(S3Utils.fromHexString(currentText.toString().replaceAll("\"", "")));
      } else if (qName.equals("LastModified")) {
         metadata.setLastModified(dateParser.iso8601DateParse(currentText.toString()));
      }
      currentText = new StringBuilder();
   }

   public void characters(char ch[], int start, int length) {
      currentText.append(ch, start, length);
   }
}
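Handlers like the one above follow the usual SAX pattern: characters() buffers text and endElement() decides what the buffered text meant. The self-contained sketch below demonstrates that same pattern against a minimal CopyObjectResult document using only JDK classes; the element values are made up for illustration.

import java.io.ByteArrayInputStream;

import javax.xml.parsers.SAXParserFactory;

import org.xml.sax.helpers.DefaultHandler;

public class CopyObjectResultSketch {
   public static void main(String[] args) throws Exception {
      String xml = "<CopyObjectResult>"
               + "<LastModified>2009-05-01T12:00:00.000Z</LastModified>"
               + "<ETag>\"9b2cf535f27731c974343645a3985328\"</ETag>"
               + "</CopyObjectResult>";

      DefaultHandler handler = new DefaultHandler() {
         private StringBuilder currentText = new StringBuilder();

         // endElement interprets whatever characters() accumulated since the last element
         public void endElement(String uri, String name, String qName) {
            if (qName.equals("ETag"))
               System.out.println("md5 hex: " + currentText.toString().replaceAll("\"", ""));
            else if (qName.equals("LastModified"))
               System.out.println("last modified: " + currentText);
            currentText = new StringBuilder();
         }

         public void characters(char[] ch, int start, int length) {
            currentText.append(ch, start, length);
         }
      };

      SAXParserFactory.newInstance().newSAXParser().parse(
               new ByteArrayInputStream(xml.getBytes()), handler);
   }
}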
@@ -29,34 +29,35 @@ import org.jclouds.http.commands.callables.xml.ParseSax;
/**
 * Parses the error from the Amazon S3 REST API.
 * 
 * @see <a
 *      href="http://docs.amazonwebservices.com/AmazonS3/2006-03-01/index.html?UsingRESTError.html"
 *      />
 * @author Adrian Cole
 */
public class ErrorHandler extends ParseSax.HandlerWithResult<S3Error> {

   private S3Error error = new S3Error();
   private StringBuilder currentText = new StringBuilder();

   public S3Error getResult() {
      return error;
   }

   public void endElement(String uri, String name, String qName) {

      if (qName.equals("Code")) {
         error.setCode(currentText.toString());
      } else if (qName.equals("Message")) {
         error.setMessage(currentText.toString());
      } else if (qName.equals("RequestId")) {
         error.setRequestId(currentText.toString());
      } else if (!qName.equals("Error")) {
         error.getDetails().put(qName, currentText.toString());
      }
      currentText = new StringBuilder();
   }

   public void characters(char ch[], int start, int length) {
      currentText.append(ch, start, length);
   }
}
@@ -28,7 +28,7 @@ import java.util.List;

import org.jclouds.aws.s3.domain.CanonicalUser;
import org.jclouds.aws.s3.domain.S3Bucket;
import org.jclouds.aws.util.DateService;
import org.jclouds.http.commands.callables.xml.ParseSax;

import com.google.inject.Inject;

@@ -38,47 +38,46 @@ import com.google.inject.Inject;
 * <p/>
 * ListAllMyBucketsResult xmlns="http://doc.s3.amazonaws.com/2006-03-01"
 * 
 * @see <a
 *      href="http://docs.amazonwebservices.com/AmazonS3/2006-03-01/index.html?RESTServiceGET.html"
 *      />
 * @author Adrian Cole
 */
public class ListAllMyBucketsHandler extends ParseSax.HandlerWithResult<List<S3Bucket.Metadata>> {

   private List<S3Bucket.Metadata> buckets = new ArrayList<S3Bucket.Metadata>();
   private S3Bucket.Metadata currentS3Bucket;
   private CanonicalUser currentOwner;
   private StringBuilder currentText = new StringBuilder();

   private final DateService dateParser;

   @Inject
   public ListAllMyBucketsHandler(DateService dateParser) {
      this.dateParser = dateParser;
   }

   public List<S3Bucket.Metadata> getResult() {
      return buckets;
   }

   public void endElement(String uri, String name, String qName) {
      if (qName.equals("ID")) { // owner stuff
         currentOwner = new CanonicalUser(currentText.toString());
      } else if (qName.equals("DisplayName")) {
         currentOwner.setDisplayName(currentText.toString());
      } else if (qName.equals("Bucket")) {
         currentS3Bucket.setOwner(currentOwner);
         buckets.add(currentS3Bucket);
      } else if (qName.equals("Name")) {
         currentS3Bucket = new S3Bucket.Metadata(currentText.toString());
      } else if (qName.equals("CreationDate")) {
         currentS3Bucket.setCreationDate(dateParser.iso8601DateParse(currentText.toString()));
      }
      currentText = new StringBuilder();
   }

   public void characters(char ch[], int start, int length) {
      currentText.append(ch, start, length);
   }
}
@@ -24,100 +24,99 @@
package org.jclouds.aws.s3.xml;

import static com.google.common.base.Preconditions.checkNotNull;

import org.jclouds.aws.s3.domain.CanonicalUser;
import org.jclouds.aws.s3.domain.S3Bucket;
import org.jclouds.aws.s3.domain.S3Object;
import org.jclouds.aws.s3.util.S3Utils;
import org.jclouds.aws.util.DateService;
import org.jclouds.http.commands.callables.xml.ParseSax;
import org.xml.sax.Attributes;

import com.google.inject.Inject;

/**
 * Parses the following XML document:
 * <p/>
 * ListBucketResult xmlns="http://s3.amazonaws.com/doc/2006-03-01"
 * 
 * @author Adrian Cole
 * @see <a
 *      href="http://docs.amazonwebservices.com/AmazonS3/2006-03-01/index.html?RESTBucketGET.html"
 *      />
 */
public class ListBucketHandler extends ParseSax.HandlerWithResult<S3Bucket> {
   private S3Bucket s3Bucket;
   private S3Object.Metadata currentObjectMetadata;
   private CanonicalUser currentOwner;
   private StringBuilder currentText = new StringBuilder();

   private final DateService dateParser;

   @Inject
   public ListBucketHandler(DateService dateParser) {
      this.dateParser = dateParser;
   }

   public S3Bucket getResult() {
      return s3Bucket;
   }

   public void setBucketName(String bucketName) {
      this.s3Bucket = new S3Bucket(checkNotNull(bucketName, "bucketName"));
   }

   private boolean inCommonPrefixes;

   public void startElement(String uri, String name, String qName, Attributes attrs) {
      if (qName.equals("CommonPrefixes")) {
         inCommonPrefixes = true;
      }
   }

   public void endElement(String uri, String name, String qName) {
      if (qName.equals("ID")) {
         currentOwner = new CanonicalUser(currentText.toString());
      } else if (qName.equals("DisplayName")) {
         currentOwner.setDisplayName(currentText.toString());
      } else if (qName.equals("Key")) { // content stuff
         currentObjectMetadata = new S3Object.Metadata(currentText.toString());
      } else if (qName.equals("LastModified")) {
         currentObjectMetadata.setLastModified(dateParser.iso8601DateParse(currentText.toString()));
      } else if (qName.equals("ETag")) {
         currentObjectMetadata.setMd5(S3Utils.fromHexString(currentText.toString().replaceAll("\"",
                  "")));
      } else if (qName.equals("Size")) {
         currentObjectMetadata.setSize(Long.parseLong(currentText.toString()));
      } else if (qName.equals("Owner")) {
         currentObjectMetadata.setOwner(currentOwner);
      } else if (qName.equals("StorageClass")) {
         currentObjectMetadata.setStorageClass(currentText.toString());
      } else if (qName.equals("Contents")) {
         s3Bucket.getContents().add(currentObjectMetadata);
      } else if (qName.equals("Name")) {// bucketName stuff last, as least likely
      } else if (qName.equals("Prefix")) {
         String prefix = currentText.toString().trim();
         if (inCommonPrefixes)
            s3Bucket.getCommonPrefixes().add(prefix);
         else
            s3Bucket.setPrefix(prefix);
      } else if (qName.equals("Delimiter")) {
         if (!currentText.toString().equals(""))
            s3Bucket.setDelimiter(currentText.toString().trim());
      } else if (qName.equals("Marker")) {
         if (!currentText.toString().equals(""))
            s3Bucket.setMarker(currentText.toString());
      } else if (qName.equals("MaxKeys")) {
         s3Bucket.setMaxKeys(Long.parseLong(currentText.toString()));
      } else if (qName.equals("IsTruncated")) {
         boolean isTruncated = Boolean.parseBoolean(currentText.toString());
         s3Bucket.setTruncated(isTruncated);
      }
      currentText = new StringBuilder();
   }

   public void characters(char ch[], int start, int length) {
      currentText.append(ch, start, length);
   }
}
@@ -23,81 +23,81 @@
 */
package org.jclouds.aws.s3.xml;

import java.util.List;

import org.jclouds.aws.s3.domain.S3Bucket;
import org.jclouds.aws.s3.domain.S3Error;
import org.jclouds.aws.s3.domain.S3Object;
import org.jclouds.http.commands.callables.xml.ParseSax;

import com.google.common.annotations.VisibleForTesting;
import com.google.inject.Inject;
import com.google.inject.Provider;

/**
 * Creates Parsers needed to interpret S3 Server messages. This class uses guice assisted inject,
 * which mandates the creation of many single-method interfaces. These interfaces are not intended
 * for public api.
 * 
 * @author Adrian Cole
 */
public class S3ParserFactory {

   @Inject
   private GenericParseFactory<List<S3Bucket.Metadata>> parseListAllMyBucketsFactory;

   @VisibleForTesting
   public static interface GenericParseFactory<T> {
      ParseSax<T> create(ParseSax.HandlerWithResult<T> handler);
   }

   @Inject
   Provider<ListAllMyBucketsHandler> ListAllMyBucketsHandlerprovider;

   /**
    * @return a parser used to handle {@link org.jclouds.aws.s3.commands.ListOwnedBuckets} responses
    */
   public ParseSax<List<S3Bucket.Metadata>> createListBucketsParser() {
      return parseListAllMyBucketsFactory.create(ListAllMyBucketsHandlerprovider.get());
   }

   @Inject
   private GenericParseFactory<S3Bucket> parseListBucketFactory;

   @Inject
   Provider<ListBucketHandler> ListBucketHandlerprovider;

   /**
    * @return a parser used to handle {@link org.jclouds.aws.s3.commands.ListBucket} responses
    */
   public ParseSax<S3Bucket> createListBucketParser() {
      return parseListBucketFactory.create(ListBucketHandlerprovider.get());
   }

   @Inject
   private GenericParseFactory<S3Object.Metadata> parseCopyObjectFactory;

   @Inject
   Provider<CopyObjectHandler> copyObjectHandlerProvider;

   /**
    * @return a parser used to handle {@link org.jclouds.aws.s3.commands.CopyObject} responses
    */
   public ParseSax<S3Object.Metadata> createCopyObjectParser() {
      return parseCopyObjectFactory.create(copyObjectHandlerProvider.get());
   }

   @Inject
   private GenericParseFactory<S3Error> parseErrorFactory;

   @Inject
   Provider<ErrorHandler> errorHandlerProvider;

   /**
    * @return a parser used to handle error conditions.
    */
   public ParseSax<S3Error> createErrorParser() {
      return parseErrorFactory.create(errorHandlerProvider.get());
   }

}
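Because the javadoc above leans on Guice assisted inject, here is a minimal wiring sketch for one of the single-method factory interfaces. The module name is hypothetical, and it assumes ParseSax takes the handler as an @Assisted constructor argument; FactoryProvider is the standard guice assistedinject extension, not code from this commit.

import java.util.List;

import org.jclouds.aws.s3.domain.S3Bucket;
import org.jclouds.http.commands.callables.xml.ParseSax;

import com.google.inject.AbstractModule;
import com.google.inject.TypeLiteral;
import com.google.inject.assistedinject.FactoryProvider;

// Hypothetical module: one binding per factory interface lets Guice synthesize create().
public class S3ParserFactoryModuleSketch extends AbstractModule {
   @Override
   protected void configure() {
      bind(new TypeLiteral<S3ParserFactory.GenericParseFactory<List<S3Bucket.Metadata>>>() {
      }).toProvider(
               FactoryProvider.newFactory(
                        new TypeLiteral<S3ParserFactory.GenericParseFactory<List<S3Bucket.Metadata>>>() {
                        },
                        new TypeLiteral<ParseSax<List<S3Bucket.Metadata>>>() {
                        }));
   }
}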
@@ -23,124 +23,125 @@
 */
package org.jclouds.aws.s3;

import static org.testng.Assert.assertEquals;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.Method;
import java.util.Map;
import java.util.TreeSet;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeoutException;

import org.apache.commons.io.IOUtils;
import org.jclouds.aws.s3.internal.BaseS3Map;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Parameters;
import org.testng.annotations.Test;

import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;

@Test
public abstract class BaseS3MapIntegrationTest<T> extends S3IntegrationTest {

   public abstract void testPutAll();

   public abstract void testEntrySet() throws IOException;

   public abstract void testValues() throws IOException;

   protected BaseS3Map<T> map;
   protected Map<String, String> fiveStrings = ImmutableMap.of("one", "apple", "two", "bear",
            "three", "candy", "four", "dogma", "five", "emma");

   // IMPORTANT: Java 5 struggles to correctly infer types in some cases which affects
   // this ImmutableMap. The explicit typing works around the issue. Java 6 seems to cope.
   // http://groups.google.com/group/google-collections-users/browse_thread/thread/df70c482c93a25d8
   protected Map<String, byte[]> fiveBytes = ImmutableMap.<String, byte[]> of("one",
            "apple".getBytes(), // Explicit cast necessary for Java 5
            "two", "bear".getBytes(), "three", "candy".getBytes(), "four", "dogma".getBytes(),
            "five", "emma".getBytes());
   protected Map<String, InputStream> fiveInputs;
   protected Map<String, File> fiveFiles;
   String tmpDirectory;

   @BeforeMethod(dependsOnMethods = "setUpBucket", groups = { "integration", "live" })
   @Parameters( { "basedir" })
   protected void setUpTempDir(String basedir) throws InterruptedException, ExecutionException,
            FileNotFoundException, IOException, TimeoutException {
      tmpDirectory = basedir + File.separator + "target" + File.separator + "testFiles"
               + File.separator + getClass().getSimpleName();
      new File(tmpDirectory).mkdirs();

      fiveFiles = ImmutableMap.of("one", new File(tmpDirectory, "apple"), "two", new File(
               tmpDirectory, "bear"), "three", new File(tmpDirectory, "candy"), "four", new File(
               tmpDirectory, "dogma"), "five", new File(tmpDirectory, "emma"));

      for (File file : fiveFiles.values()) {
         IOUtils.write(file.getName(), new FileOutputStream(file));
      }

      fiveInputs = ImmutableMap.of("one", IOUtils.toInputStream("apple"), "two", IOUtils
               .toInputStream("bear"), "three", IOUtils.toInputStream("candy"), "four", IOUtils
               .toInputStream("dogma"), "five", IOUtils.toInputStream("emma"));
      map = createMap(context, bucketName);
      map.clear();
   }

   protected abstract BaseS3Map<T> createMap(S3Context context, String bucket);

   @Test(groups = { "integration", "live" })
   public void testClear() {
      map.clear();
      assertEquals(map.size(), 0);
      putString("one", "apple");
      assertEquals(map.size(), 1);
      map.clear();
      assertEquals(map.size(), 0);
   }

   @Test(groups = { "integration", "live" })
   public abstract void testRemove() throws IOException;

   @Test(groups = { "integration", "live" })
   public void testKeySet() {
      assertEquals(map.keySet().size(), 0);
      putString("one", "two");
      assertEquals(map.keySet(), ImmutableSet.of("one"));
   }

   @Test(groups = { "integration", "live" })
   public void testContainsKey() {
      assert !map.containsKey("one");
      putString("one", "apple");
      assert map.containsKey("one");
   }

   @Test(groups = { "integration", "live" })
   public void testIsEmpty() {
      assert map.isEmpty();
      putString("one", "apple");
      assert !map.isEmpty();
   }

   abstract protected void putString(String key, String value);

   protected void fourLeftRemovingOne() {
      map.remove("one");
      assertEquals(map.size(), 4);
      assertEquals(new TreeSet<String>(map.keySet()), new TreeSet<String>(ImmutableSet.of("two",
               "three", "four", "five")));
   }

   @Test(groups = { "integration", "live" })
   public abstract void testPut() throws IOException;

   @Test(groups = { "integration", "live" })
   public void testGetBucket() {
      assertEquals(map.getBucket().getName(), bucketName);
   }

}
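For orientation, a hedged sketch of how a concrete String-valued subclass might implement one of the abstract tests against the fiveStrings fixture above. It assumes BaseS3Map<String> implements java.util.Map<String, String>, which is not stated in this diff.

   // sketch only, inside a hypothetical BaseS3MapIntegrationTest<String> subclass
   public void testPutAll() {
      map.putAll(fiveStrings);                                   // assumed Map semantics
      assertEquals(map.size(), 5);
      assertEquals(new TreeSet<String>(map.keySet()), new TreeSet<String>(fiveStrings.keySet()));
   }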
@@ -23,471 +23,482 @@
 */
package org.jclouds.aws.s3;

import static com.google.common.base.Preconditions.checkNotNull;
import static org.easymock.classextension.EasyMock.createNiceMock;

import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.SortedSet;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

import org.apache.commons.io.IOUtils;
import org.apache.commons.io.output.ByteArrayOutputStream;
import org.jclouds.aws.s3.commands.CopyObject;
import org.jclouds.aws.s3.commands.options.CopyObjectOptions;
import org.jclouds.aws.s3.commands.options.GetObjectOptions;
import org.jclouds.aws.s3.commands.options.ListBucketOptions;
import org.jclouds.aws.s3.commands.options.PutBucketOptions;
import org.jclouds.aws.s3.commands.options.PutObjectOptions;
import org.jclouds.aws.s3.domain.S3Bucket;
import org.jclouds.aws.s3.domain.S3Bucket.Metadata;
import org.jclouds.aws.s3.domain.S3Object;
import org.jclouds.aws.s3.domain.acl.CannedAccessPolicy;
import org.jclouds.aws.s3.util.S3Utils;
import org.jclouds.aws.util.DateService;
import org.jclouds.http.HttpResponse;
import org.jclouds.http.HttpResponseException;
import org.joda.time.DateTime;

import com.google.common.base.Function;
import com.google.common.base.Predicate;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.thoughtworks.xstream.XStream;

/**
 * // TODO: Adrian: Document this!
 * 
 * @author Adrian Cole
 */
public class StubS3Connection implements S3Connection {
   private static Map<String, Map<String, S3Object>> bucketToContents = new ConcurrentHashMap<String, Map<String, S3Object>>();
   private static Map<String, Metadata.LocationConstraint> bucketToLocation = new ConcurrentHashMap<String, Metadata.LocationConstraint>();
   private static Map<String, CannedAccessPolicy> keyToAcl = new ConcurrentHashMap<String, CannedAccessPolicy>();

   /**
    * @throws java.io.IOException
    */
   public static byte[] toByteArray(Object data) throws IOException {
      checkNotNull(data, "data must be set before calling generateMd5()");
      byte[] bytes = null;
      if (data == null || data instanceof byte[]) {
         bytes = (byte[]) data;
      } else if (data instanceof String) {
         bytes = ((String) data).getBytes();
      } else if (data instanceof File || data instanceof InputStream) {
         InputStream io = (data instanceof InputStream) ? (InputStream) data : new FileInputStream(
                  (File) data);
         bytes = IOUtils.toByteArray(io);
         IOUtils.closeQuietly(io);
      } else {
         throw new UnsupportedOperationException("Content not supported " + data.getClass());
      }
      return bytes;
   }

   public Future<S3Object> getObject(final String s3Bucket, final String key) {
      return getObject(s3Bucket, key, new GetObjectOptions());
   }

   public S3Object.Metadata copy(S3Object.Metadata in) {
      return (S3Object.Metadata) xstream.fromXML(xstream.toXML(in));
   }

   public S3Object.Metadata copy(S3Object.Metadata in, String newKey) {
      return (S3Object.Metadata) xstream.fromXML(xstream.toXML(in).replaceAll(in.getKey(), newKey));
   }

   public Future<S3Object.Metadata> headObject(final String s3Bucket, final String key) {
      return new FutureBase<S3Object.Metadata>() {
         public S3Object.Metadata get() throws InterruptedException, ExecutionException {
            if (!bucketToContents.containsKey(s3Bucket))
               return S3Object.Metadata.NOT_FOUND;
            Map<String, S3Object> realContents = bucketToContents.get(s3Bucket);
            if (!realContents.containsKey(key))
               return S3Object.Metadata.NOT_FOUND;
            return realContents.get(key).getMetadata();
         }
      };
   }

   public Future<Boolean> deleteObject(final String s3Bucket, final String key) {
      return new FutureBase<Boolean>() {
         public Boolean get() throws InterruptedException, ExecutionException {
            if (bucketToContents.containsKey(s3Bucket)) {
               bucketToContents.get(s3Bucket).remove(key);
            }
            return true;
         }
      };
   }

   public Future<byte[]> putObject(final String s3Bucket, final S3Object object) {
      return putObject(s3Bucket, object, new PutObjectOptions());
   }

   public Future<Boolean> putBucketIfNotExists(final String s3Bucket) {
      return new FutureBase<Boolean>() {
         public Boolean get() throws InterruptedException, ExecutionException {
            if (!bucketToContents.containsKey(s3Bucket)) {
               bucketToContents.put(s3Bucket, new ConcurrentHashMap<String, S3Object>());
            }
            return bucketToContents.containsKey(s3Bucket);
         }
      };
   }

   public Future<Boolean> deleteBucketIfEmpty(final String s3Bucket) {
      return new FutureBase<Boolean>() {
         public Boolean get() throws InterruptedException, ExecutionException {
            if (bucketToContents.containsKey(s3Bucket)) {
               if (bucketToContents.get(s3Bucket).size() == 0)
                  bucketToContents.remove(s3Bucket);
               else
                  return false;
            }
            return true;
         }
      };
   }

   XStream xstream = new XStream();

   public Future<S3Object.Metadata> copyObject(final String sourceBucket,
            final String sourceObject, final String destinationBucket,
            final String destinationObject) {
      return copyObject(sourceBucket, sourceObject, destinationBucket, destinationObject,
               new CopyObjectOptions());
   }

   public Future<Boolean> bucketExists(final String s3Bucket) {
      return new FutureBase<Boolean>() {
         public Boolean get() throws InterruptedException, ExecutionException {
            return bucketToContents.containsKey(s3Bucket);
         }
      };
   }

   public Future<S3Bucket> listBucket(final String s3Bucket) {
      return listBucket(s3Bucket, new ListBucketOptions());
   }

   private abstract class FutureBase<V> implements Future<V> {
      public boolean cancel(boolean b) {
         return false;
      }

      public boolean isCancelled() {
         return false;
      }

      public boolean isDone() {
         return true;
      }

      public V get(long l, TimeUnit timeUnit) throws InterruptedException, ExecutionException,
               TimeoutException {
         return get();
      }
   }

   public Future<List<Metadata>> listOwnedBuckets() {
      return new FutureBase<List<S3Bucket.Metadata>>() {
         public List<S3Bucket.Metadata> get() throws InterruptedException, ExecutionException {
            return Lists.newArrayList(Iterables.transform(bucketToContents.keySet(),
                     new Function<String, Metadata>() {
                        public Metadata apply(String name) {
                           return new S3Bucket.Metadata(name);
                        }
                     }));
         }
      };
   }

   public Future<Boolean> putBucketIfNotExists(String name, PutBucketOptions options) {
      if (options.getLocationConstraint() != null)
         bucketToLocation.put(name, options.getLocationConstraint());
      keyToAcl.put(name, options.getAcl());
      return putBucketIfNotExists(name);
   }

   class DelimiterFilter implements Predicate<S3Object.Metadata> {
      private final String prefix;
      private final String delimiter;

      DelimiterFilter(String prefix, String delimiter) {
         this.prefix = prefix;
         this.delimiter = delimiter;
      }

      public boolean apply(S3Object.Metadata metadata) {
         if (prefix == null)
            return metadata.getKey().indexOf(delimiter) == -1;
         if (metadata.getKey().startsWith(prefix))
            return metadata.getKey().replaceFirst(prefix, "").indexOf(delimiter) == -1;
         return false;
      }
   }

   class CommonPrefixes implements Function<S3Object.Metadata, String> {
      private final String prefix;
      private final String delimiter;
      static final String NO_PREFIX = "NO_PREFIX";

      CommonPrefixes(String prefix, String delimiter) {
         this.prefix = prefix;
         this.delimiter = delimiter;
      }

      public String apply(S3Object.Metadata metadata) {
         String working = metadata.getKey();

         if (prefix != null) {
            if (working.startsWith(prefix)) {
               working = working.replaceFirst(prefix, "");
            }
         }
         if (working.contains(delimiter)) {
            return working.substring(0, working.indexOf(delimiter));
         }
         return NO_PREFIX;
      }
   }

   public Future<S3Bucket> listBucket(final String name, final ListBucketOptions options) {
      return new FutureBase<S3Bucket>() {
         public S3Bucket get() throws InterruptedException, ExecutionException {
            final Map<String, S3Object> realContents = bucketToContents.get(name);

            if (realContents == null)
               return S3Bucket.NOT_FOUND;
            SortedSet<S3Object.Metadata> contents = Sets.newTreeSet(Iterables.transform(
                     realContents.keySet(), new Function<String, S3Object.Metadata>() {
                        public S3Object.Metadata apply(String key) {
                           return realContents.get(key).getMetadata();
                        }
                     }));
            S3Bucket returnVal = new S3Bucket(name);

            if (options.getMarker() != null) {
               final String marker;
               try {
                  marker = URLDecoder.decode(options.getMarker(), "UTF-8");
               } catch (UnsupportedEncodingException e) {
                  throw new IllegalArgumentException(e);
               }
               S3Object.Metadata lastMarkerMetadata = Iterables.find(contents,
                        new Predicate<S3Object.Metadata>() {
                           public boolean apply(S3Object.Metadata metadata) {
                              return metadata.getKey().equals(marker);
                           }
                        });
               contents = contents.tailSet(lastMarkerMetadata);
               // amazon spec means after the marker, not including it.
               contents.remove(lastMarkerMetadata);
               returnVal.setMarker(marker);
            }

            if (options.getPrefix() != null) {
               contents = Sets.newTreeSet(Iterables.filter(contents,
                        new Predicate<S3Object.Metadata>() {

                           public boolean apply(S3Object.Metadata o) {
                              return (o != null && o.getKey().startsWith(
                                       URLDecoder.decode(options.getPrefix())));
                           }
                        }));
               returnVal.setPrefix(URLDecoder.decode(options.getPrefix()));
            }

            if (options.getDelimiter() != null) {
               Iterable<String> iterable = Iterables.transform(contents, new CommonPrefixes(options
                        .getPrefix() != null ? URLDecoder.decode(options.getPrefix()) : null,
                        URLDecoder.decode(options.getDelimiter())));
               Set<String> commonPrefixes = iterable != null ? Sets.newTreeSet(iterable)
                        : new HashSet<String>();
               commonPrefixes.remove(CommonPrefixes.NO_PREFIX);

               contents = Sets.newTreeSet(Iterables.filter(contents, new DelimiterFilter(options
                        .getPrefix() != null ? URLDecoder.decode(options.getPrefix()) : null,
                        URLDecoder.decode(options.getDelimiter()))));

               returnVal.setCommonPrefixes(commonPrefixes);
               returnVal.setDelimiter(URLDecoder.decode(options.getDelimiter()));
            }
            if (options.getMaxKeys() != null) {
               contents = firstSliceOfSize(contents, Integer.parseInt(options.getMaxKeys()));
               returnVal.setMaxKeys(Integer.parseInt(options.getMaxKeys()));
               returnVal.setTruncated(true);
            }

            returnVal.setContents(contents);
            return returnVal;
         }
      };
   }

   public static <T extends Comparable> SortedSet<T> firstSliceOfSize(Iterable<T> elements, int size) {
      List<List<T>> slices = Lists.partition(Lists.newArrayList(elements), size);
      return Sets.newTreeSet(slices.get(0));
   }

   public Future<org.jclouds.aws.s3.domain.S3Object.Metadata> copyObject(final String sourceBucket,
            final String sourceObject, final String destinationBucket,
            final String destinationObject, final CopyObjectOptions options) {

      return new FutureBase<S3Object.Metadata>() {
         public S3Object.Metadata get() throws InterruptedException, ExecutionException {
            Map<String, S3Object> source = bucketToContents.get(sourceBucket);
            Map<String, S3Object> dest = bucketToContents.get(destinationBucket);
            if (source.containsKey(sourceObject)) {
               S3Object object = source.get(sourceObject);
               if (options.getIfMatch() != null) {
                  if (!Arrays.equals(object.getMetadata().getMd5(), S3Utils.fromHexString(options
                           .getIfMatch().replaceAll("\"", ""))))
                     throwResponseException(412);

               }
               if (options.getIfNoneMatch() != null) {
                  if (Arrays.equals(object.getMetadata().getMd5(), S3Utils.fromHexString(options
                           .getIfNoneMatch().replaceAll("\"", ""))))
                     throwResponseException(412);
               }
               if (options.getIfModifiedSince() != null) {
                  DateTime modifiedSince = dateService.rfc822DateParse(options.getIfModifiedSince());
                  if (modifiedSince.isAfter(object.getMetadata().getLastModified()))
                     throw new ExecutionException(new RuntimeException("after"));

               }
               if (options.getIfUnmodifiedSince() != null) {
                  DateTime unmodifiedSince = dateService.rfc822DateParse(options
                           .getIfUnmodifiedSince());
                  if (unmodifiedSince.isAfter(object.getMetadata().getLastModified()))
                     throw new ExecutionException(new RuntimeException("after"));
               }
               S3Object sourceS3 = source.get(sourceObject);
               S3Object.Metadata newMd = copy(sourceS3.getMetadata(), destinationObject);
               if (options.getAcl() != null)
                  keyToAcl.put(destinationBucket + destinationObject, options.getAcl());
               if (options.getMetadata() != null) {
                  newMd.setUserMetadata(options.getMetadata());
               }
               newMd.setLastModified(new DateTime());
               dest.put(destinationObject, new S3Object(newMd, sourceS3.getData()));
               return copy(newMd);
            }
            return S3Object.Metadata.NOT_FOUND;
         }
      };
   }

   private void throwResponseException(int code) throws ExecutionException {
      HttpResponse response = new HttpResponse();
      response.setStatusCode(code);
      throw new ExecutionException(new HttpResponseException(createNiceMock(CopyObject.class),
               response));
   }

   public Future<byte[]> putObject(final String bucketName, final S3Object object,
            final PutObjectOptions options) {
      if (!bucketToContents.containsKey(bucketName)) {
         new RuntimeException("bucketName not found: " + bucketName);
      }
      try {
         S3Object.Metadata newMd = copy(object.getMetadata());
         newMd.setLastModified(new DateTime());
         byte[] data = toByteArray(object.getData());
         final byte[] md5 = S3Utils.md5(data);
         newMd.setMd5(md5);
         newMd.setContentType("binary/octet-stream");
         if (options.getAcl() != null)
            keyToAcl.put(bucketName + object, options.getAcl());
         bucketToContents.get(bucketName).put(object.getKey(), new S3Object(newMd, data));
         return new FutureBase<byte[]>() {
            public byte[] get() throws InterruptedException, ExecutionException {
               return md5;
            }
         };
      } catch (IOException e) {
         throw new RuntimeException(e);
      }

   }

   DateService dateService = new DateService();

   public Future<S3Object> getObject(final String bucketName, final String key,
            final GetObjectOptions options) {
      return new FutureBase<S3Object>() {
         public S3Object get() throws InterruptedException, ExecutionException {
            if (!bucketToContents.containsKey(bucketName))
               return S3Object.NOT_FOUND;
            Map<String, S3Object> realContents = bucketToContents.get(bucketName);
            if (!realContents.containsKey(key))
               return S3Object.NOT_FOUND;

            S3Object object = realContents.get(key);

            if (options.getIfMatch() != null) {
               if (!Arrays.equals(object.getMetadata().getMd5(), S3Utils.fromHexString(options
                        .getIfMatch().replaceAll("\"", ""))))
                  throwResponseException(412);
            }
            if (options.getIfNoneMatch() != null) {
               if (Arrays.equals(object.getMetadata().getMd5(), S3Utils.fromHexString(options
                        .getIfNoneMatch().replaceAll("\"", ""))))
                  throwResponseException(304);
            }
            if (options.getIfModifiedSince() != null) {
               DateTime modifiedSince = dateService.rfc822DateParse(options.getIfModifiedSince());
               if (modifiedSince.isAfter(object.getMetadata().getLastModified()))
                  throw new ExecutionException(new RuntimeException("after"));

            }
            if (options.getIfUnmodifiedSince() != null) {
               DateTime unmodifiedSince = dateService.rfc822DateParse(options
                        .getIfUnmodifiedSince());
               if (unmodifiedSince.isAfter(object.getMetadata().getLastModified()))
                  throw new ExecutionException(new RuntimeException("after"));
            }
            S3Object returnVal = new S3Object(copy(object.getMetadata()), object.getData());
            if (options.getRange() != null) {
               byte[] data = (byte[]) returnVal.getData();
               ByteArrayOutputStream out = new ByteArrayOutputStream();
               for (String s : options.getRange().replaceAll("bytes=", "").split(",")) {
                  if (s.startsWith("-")) {
                     int length = Integer.parseInt(s.replaceAll("\\-", ""));
                     out.write(data, data.length - length, length);
                  } else if (s.endsWith("-")) {
                     int offset = Integer.parseInt(s.replaceAll("\\-", ""));
                     out.write(data, offset, data.length - offset);
                  } else if (s.contains("-")) {
                     String[] firstLast = s.split("\\-");
                     int offset = Integer.parseInt(firstLast[0]);
                     int last = Integer.parseInt(firstLast[1]);
                     int length = (last < data.length) ? last + 1 : data.length - offset;

                     out.write(data, offset, length);
                  } else {
                     throw new IllegalArgumentException("first and last were null!");
                  }

               }
               returnVal.setData(out.toByteArray());
               returnVal.setContentLength(out.size());
               returnVal.getMetadata().setSize(data.length);
            }
            returnVal.setData(new ByteArrayInputStream((byte[]) returnVal.getData()));
            return returnVal;
         }
      };
   }

}
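The marker handling in listBucket above follows the note that listing resumes strictly after the marker key. The same effect on a plain sorted set, as a self-contained illustration:

public class MarkerSemanticsSketch {
   public static void main(String[] args) {
      java.util.SortedSet<String> keys = new java.util.TreeSet<String>(
               java.util.Arrays.asList("apple", "bear", "candy", "dogma"));
      // tailSet("bear") still contains the marker itself...
      java.util.SortedSet<String> afterMarker = new java.util.TreeSet<String>(keys.tailSet("bear"));
      // ...so the marker is dropped, leaving only keys that sort after it: [candy, dogma]
      afterMarker.remove("bear");
      System.out.println(afterMarker);
   }
}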
@@ -34,8 +34,6 @@ import org.jclouds.aws.s3.domain.S3Object.Metadata;
import org.jclouds.http.HttpException;
import org.jclouds.http.HttpHeaders;
import org.jclouds.http.HttpResponse;
import org.testng.annotations.Test;

/**
@@ -43,58 +41,50 @@ import org.testng.annotations.Test;
 */
@Test(groups = "unit", testName = "s3.ParseObjectFromHeadersAndHttpContentTest")
public class ParseObjectFromHeadersAndHttpContentTest {
   ParseObjectFromHeadersAndHttpContent callable;
   ParseMetadataFromHeaders metadataParser;

   @Test(expectedExceptions = IllegalStateException.class)
   public void testCall() throws HttpException {
      metadataParser = createMock(ParseMetadataFromHeaders.class);
      callable = new ParseObjectFromHeadersAndHttpContent(metadataParser);
      HttpResponse response = createMock(HttpResponse.class);
      expect(response.getStatusCode()).andReturn(409).atLeastOnce();
      expect(response.getContent()).andReturn(null);
      replay(response);
      callable.setResponse(response);
      callable.call();
   }

   @Test
   public void testParseContentLengthWhenContentRangeSet() throws HttpException {
      metadataParser = createMock(ParseMetadataFromHeaders.class);
      callable = new ParseObjectFromHeadersAndHttpContent(metadataParser);
      HttpResponse response = createMock(HttpResponse.class);
      metadataParser.setResponse(response);
      Metadata meta = createMock(Metadata.class);
      expect(metadataParser.call()).andReturn(meta);
      expect(meta.getSize()).andReturn(-1l);
      meta.setSize(-1l);
      expect(response.getFirstHeaderOrNull(HttpHeaders.CONTENT_LENGTH)).andReturn("10485760")
               .atLeastOnce();
      expect(response.getFirstHeaderOrNull(HttpHeaders.CONTENT_RANGE)).andReturn(
               "0-10485759/20232760").atLeastOnce();
      meta.setSize(20232760l);
      expect(meta.getSize()).andReturn(20232760l);
      expect(response.getStatusCode()).andReturn(200).atLeastOnce();
      expect(response.getContent()).andReturn(IOUtils.toInputStream("test"));
      replay(response);
      replay(metadataParser);
      replay(meta);

      callable.setResponse(response);
      S3Object object = callable.call();
      assertEquals(object.getContentLength(), 10485760);
      assertEquals(object.getMetadata().getSize(), 20232760);
      assertEquals(object.getContentRange(), "0-10485759/20232760");

   }
}
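The assertions above rest on the difference between the two headers: Content-Length is the size of the returned slice, while the figure after the slash in Content-Range is the full object size. A small self-contained illustration of that arithmetic; the parsing below is illustrative, not the production header parser.

public class ContentRangeSketch {
   public static void main(String[] args) {
      String contentLength = "10485760";
      String contentRange = "0-10485759/20232760";
      long sliceSize = Long.parseLong(contentLength);
      long totalSize = Long.parseLong(contentRange.substring(contentRange.lastIndexOf('/') + 1));
      // the slice covers bytes 0..10485759 of a 20232760-byte object
      System.out.println(sliceSize + " of " + totalSize + " bytes returned");
   }
}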
@@ -23,309 +23,300 @@
 */
package org.jclouds.aws.s3.commands.options;

import static org.jclouds.aws.s3.commands.options.CopyObjectOptions.Builder.ifSourceMd5DoesntMatch;
import static org.jclouds.aws.s3.commands.options.CopyObjectOptions.Builder.ifSourceMd5Matches;
import static org.jclouds.aws.s3.commands.options.CopyObjectOptions.Builder.ifSourceModifiedSince;
import static org.jclouds.aws.s3.commands.options.CopyObjectOptions.Builder.ifSourceUnmodifiedSince;
import static org.jclouds.aws.s3.commands.options.CopyObjectOptions.Builder.overrideAcl;
import static org.jclouds.aws.s3.commands.options.CopyObjectOptions.Builder.overrideMetadataWith;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;

import java.io.UnsupportedEncodingException;

import org.jclouds.aws.s3.domain.acl.CannedAccessPolicy;
import org.jclouds.aws.s3.reference.S3Headers;
import org.jclouds.aws.s3.util.S3Utils;
import org.jclouds.aws.util.DateService;
import org.joda.time.DateTime;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;

import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;

/**
 * Tests possible uses of CopyObjectOptions and CopyObjectOptions.Builder.*
 *
 * @author Adrian Cole
 */
@Test(groups = "unit", testName = "s3.CopyObjectOptionsTest")
public class CopyObjectOptionsTest {

   private byte[] testBytes;
   private DateTime now;
   private String nowExpected;
   private Multimap<String, String> goodMeta;
   private Multimap<String, String> badMeta;

   @BeforeMethod
   void setUp() {
      goodMeta = HashMultimap.create();
      goodMeta.put("x-amz-meta-adrian", "foo");
      badMeta = HashMultimap.create();
      badMeta.put("x-google-meta-adrian", "foo");

      now = new DateTime();
      nowExpected = new DateService().rfc822DateFormat(now);
      testBytes = new byte[] { 0, 1, 2, 3, 4, 5, 6, 7 };
   }

   @Test
   void testGoodMetaStatic() {
      CopyObjectOptions options = overrideMetadataWith(goodMeta);
      assertGoodMeta(options);
   }

   @Test(expectedExceptions = NullPointerException.class)
   public void testMetaNPE() {
      overrideMetadataWith(null);
   }

   @Test(expectedExceptions = IllegalArgumentException.class)
   public void testBadMeta() {
      overrideMetadataWith(badMeta);
   }

   @Test(expectedExceptions = IllegalArgumentException.class)
   public void testBadMetaStatic() {
      overrideMetadataWith(badMeta);
   }

   private void assertGoodMeta(CopyObjectOptions options) {
      assert options != null;
      assert options.getMetadata() != null;
      Multimap<String, String> headers = options.buildRequestHeaders();
      assertEquals(headers.size(), 2);
      assertEquals(headers.get("x-amz-metadata-directive").iterator().next(), "REPLACE");
      assertEquals(options.getMetadata().size(), 1);
      assertEquals(headers.get("x-amz-meta-adrian").iterator().next(), "foo");
      assertEquals(options.getMetadata().get("x-amz-meta-adrian").iterator().next(), "foo");
   }

   @Test
   void testGoodMeta() {
      CopyObjectOptions options = new CopyObjectOptions();
      options.overrideMetadataWith(goodMeta);
      assertGoodMeta(options);
   }

   @Test
   public void testIfModifiedSince() {
      CopyObjectOptions options = new CopyObjectOptions();
      options.ifSourceModifiedSince(now);
      assertEquals(options.getIfModifiedSince(), nowExpected);
   }

   @Test
   public void testNullIfModifiedSince() {
      CopyObjectOptions options = new CopyObjectOptions();
      assertNull(options.getIfModifiedSince());
   }

   @Test
   public void testIfModifiedSinceStatic() {
      CopyObjectOptions options = ifSourceModifiedSince(now);
      assertEquals(options.getIfModifiedSince(), nowExpected);
   }

   @Test(expectedExceptions = NullPointerException.class)
   public void testIfModifiedSinceNPE() {
      ifSourceModifiedSince(null);
   }

   @Test
   public void testIfUnmodifiedSince() {
      CopyObjectOptions options = new CopyObjectOptions();
      options.ifSourceUnmodifiedSince(now);
      isNowExpected(options);
   }

   @Test
   public void testNullIfUnmodifiedSince() {
      CopyObjectOptions options = new CopyObjectOptions();
      assertNull(options.getIfUnmodifiedSince());
   }

   @Test
   public void testIfUnmodifiedSinceStatic() {
      CopyObjectOptions options = ifSourceUnmodifiedSince(now);
      isNowExpected(options);
   }

   private void isNowExpected(CopyObjectOptions options) {
      assertEquals(options.getIfUnmodifiedSince(), nowExpected);
   }

   @Test(expectedExceptions = NullPointerException.class)
   public void testIfUnmodifiedSinceNPE() {
      ifSourceUnmodifiedSince(null);
   }

   @Test
   public void testIfMd5Matches() throws UnsupportedEncodingException {
      CopyObjectOptions options = new CopyObjectOptions();
      options.ifSourceMd5Matches(testBytes);
      matchesHex(options.getIfMatch());
   }

   @Test
   public void testNullIfMd5Matches() {
      CopyObjectOptions options = new CopyObjectOptions();
      assertNull(options.getIfMatch());
   }

   @Test
   public void testIfMd5MatchesStatic() throws UnsupportedEncodingException {
      CopyObjectOptions options = ifSourceMd5Matches(testBytes);
      matchesHex(options.getIfMatch());
   }

   @Test(expectedExceptions = NullPointerException.class)
   public void testIfMd5MatchesNPE() throws UnsupportedEncodingException {
      ifSourceMd5Matches(null);
   }

   @Test
   public void testIfMd5DoesntMatch() throws UnsupportedEncodingException {
      CopyObjectOptions options = new CopyObjectOptions();
      options.ifSourceMd5DoesntMatch(testBytes);
      matchesHex(options.getIfNoneMatch());
   }

   @Test
   public void testNullIfMd5DoesntMatch() {
      CopyObjectOptions options = new CopyObjectOptions();
      assertNull(options.getIfNoneMatch());
   }

   @Test
   public void testIfMd5DoesntMatchStatic() throws UnsupportedEncodingException {
      CopyObjectOptions options = ifSourceMd5DoesntMatch(testBytes);
      matchesHex(options.getIfNoneMatch());
   }

   @Test(expectedExceptions = NullPointerException.class)
   public void testIfMd5DoesntMatchNPE() throws UnsupportedEncodingException {
      ifSourceMd5DoesntMatch(null);
   }

   private void matchesHex(String match) throws UnsupportedEncodingException {
      String expected = "\"" + S3Utils.toHexString(testBytes) + "\"";
      assertEquals(match, expected);
   }

   @Test(expectedExceptions = IllegalStateException.class)
   public void testIfUnmodifiedAfterModified() {
      ifSourceModifiedSince(now).ifSourceUnmodifiedSince(now);
   }

   public void testIfUnmodifiedAfterMd5Matches() throws UnsupportedEncodingException {
      ifSourceMd5Matches(testBytes).ifSourceUnmodifiedSince(now);
   }

   @Test(expectedExceptions = IllegalStateException.class)
   public void testIfUnmodifiedAfterMd5DoesntMatch() throws UnsupportedEncodingException {
      ifSourceMd5DoesntMatch(testBytes).ifSourceUnmodifiedSince(now);
   }

   @Test(expectedExceptions = IllegalStateException.class)
   public void testIfModifiedAfterUnmodified() {
      ifSourceUnmodifiedSince(now).ifSourceModifiedSince(now);
   }

   @Test(expectedExceptions = IllegalStateException.class)
   public void testIfModifiedAfterMd5Matches() throws UnsupportedEncodingException {
      ifSourceMd5Matches(testBytes).ifSourceModifiedSince(now);
   }

   public void testIfModifiedAfterMd5DoesntMatch() throws UnsupportedEncodingException {
      ifSourceMd5DoesntMatch(testBytes).ifSourceModifiedSince(now);
   }

   @Test(expectedExceptions = IllegalStateException.class)
   public void testMd5MatchesAfterIfModified() throws UnsupportedEncodingException {
      ifSourceModifiedSince(now).ifSourceMd5Matches(testBytes);
   }

   public void testMd5MatchesAfterIfUnmodified() throws UnsupportedEncodingException {
      ifSourceUnmodifiedSince(now).ifSourceMd5Matches(testBytes);
   }

   @Test(expectedExceptions = IllegalStateException.class)
   public void testMd5MatchesAfterMd5DoesntMatch() throws UnsupportedEncodingException {
      ifSourceMd5DoesntMatch(testBytes).ifSourceMd5Matches(testBytes);
   }

   public void testMd5DoesntMatchAfterIfModified() throws UnsupportedEncodingException {
      ifSourceModifiedSince(now).ifSourceMd5DoesntMatch(testBytes);
   }

   @Test(expectedExceptions = IllegalStateException.class)
   public void testMd5DoesntMatchAfterIfUnmodified() throws UnsupportedEncodingException {
      ifSourceUnmodifiedSince(now).ifSourceMd5DoesntMatch(testBytes);
   }

   @Test(expectedExceptions = IllegalStateException.class)
   public void testMd5DoesntMatchAfterMd5Matches() throws UnsupportedEncodingException {
      ifSourceMd5Matches(testBytes).ifSourceMd5DoesntMatch(testBytes);
   }

   @Test
   void testBuildRequestHeadersWhenMetadataNull() throws UnsupportedEncodingException {
      assert new CopyObjectOptions().buildRequestHeaders() != null;
   }

   @Test
   void testBuildRequestHeaders() throws UnsupportedEncodingException {

      Multimap<String, String> headers = ifSourceModifiedSince(now).ifSourceMd5DoesntMatch(
               testBytes).overrideMetadataWith(goodMeta).buildRequestHeaders();
      assertEquals(headers.get("x-amz-copy-source-if-modified-since").iterator().next(),
               new DateService().rfc822DateFormat(now));
      assertEquals(headers.get("x-amz-copy-source-if-none-match").iterator().next(), "\""
               + S3Utils.toHexString(testBytes) + "\"");
      for (String value : goodMeta.values())
         assertTrue(headers.containsValue(value));

   }

   @Test
   public void testAclDefault() {
      CopyObjectOptions options = new CopyObjectOptions();
      assertEquals(options.getAcl(), CannedAccessPolicy.PRIVATE);
   }

   @Test
   public void testAclStatic() {
      CopyObjectOptions options = overrideAcl(CannedAccessPolicy.AUTHENTICATED_READ);
      assertEquals(options.getAcl(), CannedAccessPolicy.AUTHENTICATED_READ);
   }

   @Test
   void testBuildRequestHeadersACL() throws UnsupportedEncodingException {

      Multimap<String, String> headers = overrideAcl(CannedAccessPolicy.AUTHENTICATED_READ)
               .buildRequestHeaders();
      assertEquals(headers.get(S3Headers.CANNED_ACL).iterator().next(),
               CannedAccessPolicy.AUTHENTICATED_READ.toString());
   }
}
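For readers skimming the diff, a minimal usage sketch of the builder exercised above. It only chains calls that the tests themselves make; the byte array stands in for a real MD5 digest, so treat it as an illustration rather than part of the commit.

import static org.jclouds.aws.s3.commands.options.CopyObjectOptions.Builder.ifSourceModifiedSince;

import org.joda.time.DateTime;

import com.google.common.collect.Multimap;

public class CopyObjectOptionsUsage {
   public static void main(String[] args) throws Exception {
      byte[] sourceMd5 = new byte[] { 0, 1, 2, 3, 4, 5, 6, 7 }; // placeholder digest
      // same chain testBuildRequestHeaders verifies: a conditional copy rendered into the
      // x-amz-copy-source-if-modified-since and x-amz-copy-source-if-none-match headers
      Multimap<String, String> headers = ifSourceModifiedSince(new DateTime().minusDays(1))
               .ifSourceMd5DoesntMatch(sourceMd5).buildRequestHeaders();
      System.out.println(headers);
   }
}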
@@ -23,312 +23,308 @@
 */
package org.jclouds.aws.s3.commands.options;

import static org.jclouds.aws.s3.commands.options.GetObjectOptions.Builder.ifMd5DoesntMatch;
import static org.jclouds.aws.s3.commands.options.GetObjectOptions.Builder.ifMd5Matches;
import static org.jclouds.aws.s3.commands.options.GetObjectOptions.Builder.ifModifiedSince;
import static org.jclouds.aws.s3.commands.options.GetObjectOptions.Builder.ifUnmodifiedSince;
import static org.jclouds.aws.s3.commands.options.GetObjectOptions.Builder.range;
import static org.jclouds.aws.s3.commands.options.GetObjectOptions.Builder.startAt;
import static org.jclouds.aws.s3.commands.options.GetObjectOptions.Builder.tail;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNull;

import java.io.UnsupportedEncodingException;

import org.jclouds.aws.s3.util.S3Utils;
import org.jclouds.aws.util.DateService;
import org.joda.time.DateTime;
import org.testng.annotations.BeforeTest;
import org.testng.annotations.Test;

/**
 * Tests possible uses of GetObjectOptions and GetObjectOptions.Builder.*
 *
 * @author Adrian Cole
 */
@Test(groups = "unit", testName = "s3.GetObjectOptionsTest")
public class GetObjectOptionsTest {

   private byte[] testBytes;
   private DateTime now;
   private String nowExpected;

   @BeforeTest
   void setUp() {
      now = new DateTime();
      nowExpected = new DateService().rfc822DateFormat(now);
      testBytes = new byte[] { 0, 1, 2, 3, 4, 5, 6, 7 };
   }

   @Test
   public void testIfModifiedSince() {
      GetObjectOptions options = new GetObjectOptions();
      options.ifModifiedSince(now);
      assertEquals(options.getIfModifiedSince(), nowExpected);
   }

   @Test
   public void testNullIfModifiedSince() {
      GetObjectOptions options = new GetObjectOptions();
      assertNull(options.getIfModifiedSince());
   }

   @Test
   public void testIfModifiedSinceStatic() {
      GetObjectOptions options = ifModifiedSince(now);
      assertEquals(options.getIfModifiedSince(), nowExpected);
   }

   @Test(expectedExceptions = NullPointerException.class)
   public void testIfModifiedSinceNPE() {
      ifModifiedSince(null);
   }

   @Test
   public void testIfUnmodifiedSince() {
      GetObjectOptions options = new GetObjectOptions();
      options.ifUnmodifiedSince(now);
      isNowExpected(options);
   }

   @Test
   public void testNullIfUnmodifiedSince() {
      GetObjectOptions options = new GetObjectOptions();
      assertNull(options.getIfUnmodifiedSince());
   }

   @Test
   public void testIfUnmodifiedSinceStatic() {
      GetObjectOptions options = ifUnmodifiedSince(now);
      isNowExpected(options);
   }

   private void isNowExpected(GetObjectOptions options) {
      assertEquals(options.getIfUnmodifiedSince(), nowExpected);
   }

   @Test(expectedExceptions = NullPointerException.class)
   public void testIfUnmodifiedSinceNPE() {
      ifUnmodifiedSince(null);
   }

   public void testModifiedSinceAndRange() {
      GetObjectOptions options = new GetObjectOptions();
      options.ifModifiedSince(now);
      options.range(0, 1024);
      assertEquals(options.getIfModifiedSince(), nowExpected);
      bytes1to1024(options);
   }

   @Test
   public void testRange() {
      GetObjectOptions options = new GetObjectOptions();
      options.range(0, 1024);
      bytes1to1024(options);
   }

   private void bytes1to1024(GetObjectOptions options) {
      assertEquals(options.getRange(), "bytes=0-1024");
   }

   @Test
   public void testRangeZeroToFive() {
      GetObjectOptions options = new GetObjectOptions();
      options.range(0, 5);
      assertEquals(options.getRange(), "bytes=0-5");
   }

   @Test
   public void testTail() {
      GetObjectOptions options = new GetObjectOptions();
      options.tail(100);
      assertEquals(options.getRange(), "bytes=-100");
   }

   @Test
   public void testTailStatic() {
      GetObjectOptions options = tail(100);
      assertEquals(options.getRange(), "bytes=-100");
   }

   @Test(expectedExceptions = IllegalArgumentException.class)
   public void testTailFail() {
      GetObjectOptions options = new GetObjectOptions();
      options.tail(0);
   }

   @Test
   public void testStartAt() {
      GetObjectOptions options = new GetObjectOptions();
      options.startAt(100);
      assertEquals(options.getRange(), "bytes=100-");
   }

   @Test
   public void testStartAtStatic() {
      GetObjectOptions options = startAt(100);
      assertEquals(options.getRange(), "bytes=100-");
   }

   @Test(expectedExceptions = IllegalArgumentException.class)
   public void testStartAtFail() {
      GetObjectOptions options = new GetObjectOptions();
      options.startAt(-1);
   }

   @Test
   public void testRangeZeroToFiveAnd10through100() {
      GetObjectOptions options = new GetObjectOptions();
      options.range(0, 5).range(10, 100);
      assertEquals(options.getRange(), "bytes=0-5,10-100");
   }

   @Test
   public void testNullRange() {
      GetObjectOptions options = new GetObjectOptions();
      assertNull(options.getRange());
   }

   @Test
   public void testRangeStatic() {
      GetObjectOptions options = range(0, 1024);
      bytes1to1024(options);
   }

   @Test(expectedExceptions = IllegalArgumentException.class)
   public void testRangeNegative1() {
      range(-1, 0);
   }

   @Test(expectedExceptions = IllegalArgumentException.class)
   public void testRangeNegative2() {
      range(0, -1);
   }

   @Test(expectedExceptions = IllegalArgumentException.class)
   public void testRangeNegative() {
      range(-1, -1);
   }

   @Test
   public void testIfMd5Matches() throws UnsupportedEncodingException {
      GetObjectOptions options = new GetObjectOptions();
      options.ifMd5Matches(testBytes);
      matchesHex(options.getIfMatch());
   }

   @Test
   public void testNullIfMd5Matches() {
      GetObjectOptions options = new GetObjectOptions();
      assertNull(options.getIfMatch());
   }

   @Test
   public void testIfMd5MatchesStatic() throws UnsupportedEncodingException {
      GetObjectOptions options = ifMd5Matches(testBytes);
      matchesHex(options.getIfMatch());
   }

   @Test(expectedExceptions = NullPointerException.class)
   public void testIfMd5MatchesNPE() throws UnsupportedEncodingException {
      ifMd5Matches(null);
   }

   @Test
   public void testIfMd5DoesntMatch() throws UnsupportedEncodingException {
      GetObjectOptions options = new GetObjectOptions();
      options.ifMd5DoesntMatch(testBytes);
      matchesHex(options.getIfNoneMatch());
   }

   @Test
   public void testNullIfMd5DoesntMatch() {
      GetObjectOptions options = new GetObjectOptions();
      assertNull(options.getIfNoneMatch());
   }

   @Test
   public void testIfMd5DoesntMatchStatic() throws UnsupportedEncodingException {
      GetObjectOptions options = ifMd5DoesntMatch(testBytes);
      matchesHex(options.getIfNoneMatch());
   }

   @Test(expectedExceptions = NullPointerException.class)
   public void testIfMd5DoesntMatchNPE() throws UnsupportedEncodingException {
      ifMd5DoesntMatch(null);
   }

   private void matchesHex(String match) throws UnsupportedEncodingException {
      String expected = "\"" + S3Utils.toHexString(testBytes) + "\"";
      assertEquals(match, expected);
   }

   @Test(expectedExceptions = IllegalArgumentException.class)
   public void testIfUnmodifiedAfterModified() {
      ifModifiedSince(now).ifUnmodifiedSince(now);
   }

   public void testIfUnmodifiedAfterMd5Matches() throws UnsupportedEncodingException {
      ifMd5Matches(testBytes).ifUnmodifiedSince(now);
   }

   @Test(expectedExceptions = IllegalArgumentException.class)
   public void testIfUnmodifiedAfterMd5DoesntMatch() throws UnsupportedEncodingException {
      ifMd5DoesntMatch(testBytes).ifUnmodifiedSince(now);
   }

   @Test(expectedExceptions = IllegalArgumentException.class)
   public void testIfModifiedAfterUnmodified() {
      ifUnmodifiedSince(now).ifModifiedSince(now);
   }

   @Test(expectedExceptions = IllegalArgumentException.class)
   public void testIfModifiedAfterMd5Matches() throws UnsupportedEncodingException {
      ifMd5Matches(testBytes).ifModifiedSince(now);
   }

   public void testIfModifiedAfterMd5DoesntMatch() throws UnsupportedEncodingException {
      ifMd5DoesntMatch(testBytes).ifModifiedSince(now);
   }

   @Test(expectedExceptions = IllegalArgumentException.class)
   public void testMd5MatchesAfterIfModified() throws UnsupportedEncodingException {
      ifModifiedSince(now).ifMd5Matches(testBytes);
   }

   public void testMd5MatchesAfterIfUnmodified() throws UnsupportedEncodingException {
      ifUnmodifiedSince(now).ifMd5Matches(testBytes);
   }

   @Test(expectedExceptions = IllegalArgumentException.class)
   public void testMd5MatchesAfterMd5DoesntMatch() throws UnsupportedEncodingException {
      ifMd5DoesntMatch(testBytes).ifMd5Matches(testBytes);
   }

   public void testMd5DoesntMatchAfterIfModified() throws UnsupportedEncodingException {
      ifModifiedSince(now).ifMd5DoesntMatch(testBytes);
   }

   @Test(expectedExceptions = IllegalArgumentException.class)
   public void testMd5DoesntMatchAfterIfUnmodified() throws UnsupportedEncodingException {
      ifUnmodifiedSince(now).ifMd5DoesntMatch(testBytes);
   }

   @Test(expectedExceptions = IllegalArgumentException.class)
   public void testMd5DoesntMatchAfterMd5Matches() throws UnsupportedEncodingException {
      ifMd5Matches(testBytes).ifMd5DoesntMatch(testBytes);
   }

}
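As a quick illustration of the range handling the tests above assert (not part of the commit), the builder accumulates ranges into a single header value:

import static org.jclouds.aws.s3.commands.options.GetObjectOptions.Builder.range;

import org.jclouds.aws.s3.commands.options.GetObjectOptions;

public class GetObjectOptionsUsage {
   public static void main(String[] args) {
      // two ranges collapse into one Range header, as testRangeZeroToFiveAnd10through100 checks
      GetObjectOptions options = range(0, 5).range(10, 100);
      System.out.println(options.getRange()); // bytes=0-5,10-100
   }
}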
@@ -1,17 +1,18 @@
package org.jclouds.aws.s3.config;

import org.jclouds.aws.s3.S3Connection;
import org.jclouds.aws.s3.StubS3Connection;

import com.google.inject.AbstractModule;

/**
 * // TODO: Adrian: Document this!
 *
 * adds a stub alternative to invoking S3
 *
 * @author Adrian Cole
 */
@S3ConnectionModule
public class StubS3ConnectionModule extends AbstractModule {
   protected void configure() {
      bind(S3Connection.class).to(StubS3Connection.class);
   }
}
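A sketch of how the stub module is meant to be consumed, assuming StubS3Connection needs no further bindings; illustration only, not from the commit:

import org.jclouds.aws.s3.S3Connection;
import org.jclouds.aws.s3.config.StubS3ConnectionModule;

import com.google.inject.Guice;

public class StubWiring {
   public static void main(String[] args) {
      // the binding above resolves S3Connection to the in-memory stub
      S3Connection connection = Guice.createInjector(new StubS3ConnectionModule())
               .getInstance(S3Connection.class);
      System.out.println(connection.getClass().getSimpleName());
   }
}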
@@ -23,40 +23,41 @@
 */
package org.jclouds.aws.s3.filters;

import org.jclouds.aws.s3.reference.S3Constants;
import org.jclouds.aws.util.DateService;
import org.testng.annotations.Test;

import com.google.inject.AbstractModule;
import com.google.inject.Guice;
import com.google.inject.name.Names;

@Test(groups = "unit", testName = "s3.RequestAuthorizeSignatureTest")
public class RequestAuthorizeSignatureTest {

   RequestAuthorizeSignature filter = null;

   @Test
   void testUpdatesOnlyOncePerSecond() throws NoSuchMethodException, InterruptedException {
      filter = Guice.createInjector(new AbstractModule() {

         protected void configure() {
            bindConstant().annotatedWith(Names.named(S3Constants.PROPERTY_AWS_ACCESSKEYID)).to(
                     "foo");
            bindConstant().annotatedWith(Names.named(S3Constants.PROPERTY_AWS_SECRETACCESSKEY)).to(
                     "bar");
            bind(DateService.class);

         }
      }).getInstance(RequestAuthorizeSignature.class);
      // filter.createNewStamp();
      String timeStamp = filter.timestampAsHeaderString();
      // replay(filter);
      for (int i = 0; i < 10; i++)
         filter.updateIfTimeOut();
      assert timeStamp.equals(filter.timestampAsHeaderString());
      Thread.sleep(1000);
      assert !timeStamp.equals(filter.timestampAsHeaderString());
      // verify(filter);
   }

}
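The behaviour the test pins down is a timestamp that refreshes at most once per second. A rough sketch of that caching pattern, an assumption about what updateIfTimeOut() does rather than the filter's actual code:

public class CachedTimestamp {
   private volatile String stamp = newStamp();
   private volatile long lastUpdated = System.currentTimeMillis();

   public String timestampAsHeaderString() {
      return stamp;
   }

   public void updateIfTimeOut() {
      long now = System.currentTimeMillis();
      if (now - lastUpdated >= 1000) { // refresh at most once per second
         stamp = newStamp();
         lastUpdated = now;
      }
   }

   private static String newStamp() {
      return new java.util.Date().toString(); // placeholder for the RFC-822 header format
   }
}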
@@ -23,25 +23,11 @@
 */
package com.amazon.s3;

import static org.testng.Assert.assertEquals;

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ExecutionException;

import org.jclouds.aws.PerformanceTest;
import org.jclouds.aws.s3.util.DateService;
import org.joda.time.DateTime;
import org.testng.annotations.Test;

import com.google.inject.Guice;
import com.google.inject.Injector;

/*
 * TODO: Scrap any non-DateService references (eg Joda & Amazon) if/when
 * we confirm that the DateService is fast enough.
 */

/**
 * Compares performance of date operations
 *
@@ -49,142 +35,7 @@ import com.google.inject.Injector;
 * @author James Murty
 */
@Test(sequential = true, timeOut = 2 * 60 * 1000, testName = "s3.DateTest")
public class DateServiceTest extends PerformanceTest {
   Injector i = Guice.createInjector();

   DateService dateService = i.getInstance(DateService.class);

   private TestData[] testData;

   class TestData {
      public final String iso8601DateString;
      public final String rfc822DateString;
      public final DateTime date;

      TestData(String iso8601, String rfc822, DateTime dateTime) {
         this.iso8601DateString = iso8601;
         this.rfc822DateString = rfc822;
         this.date = dateTime;
      }
   }

   public DateServiceTest() {
      // Constant time test values, each TestData item must contain matching times!
      testData = new TestData[] {
               new TestData("2009-03-12T02:00:07.000Z", "Thu, 12 Mar 2009 02:00:07 GMT",
                        new DateTime(1236823207000l)),
               new TestData("2009-03-14T04:00:07.000Z", "Sat, 14 Mar 2009 04:00:07 GMT",
                        new DateTime(1237003207000l)),
               new TestData("2009-03-16T06:00:07.000Z", "Mon, 16 Mar 2009 06:00:07 GMT",
                        new DateTime(1237183207000l)),
               new TestData("2009-03-18T08:00:07.000Z", "Wed, 18 Mar 2009 08:00:07 GMT",
                        new DateTime(1237363207000l)),
               new TestData("2009-03-20T10:00:07.000Z", "Fri, 20 Mar 2009 10:00:07 GMT",
                        new DateTime(1237543207000l)) };
   }

   @Test
   public void testIso8601DateParse() throws ExecutionException, InterruptedException {
      DateTime dsDate = dateService.iso8601DateParse(testData[0].iso8601DateString);
      assertEquals(dsDate, testData[0].date);
   }

   @Test
   public void testRfc822DateParse() throws ExecutionException, InterruptedException {
      DateTime dsDate = dateService.rfc822DateParse(testData[0].rfc822DateString);
      assertEquals(dsDate, testData[0].date);
   }

   @Test
   public void testIso8601DateFormat() throws ExecutionException, InterruptedException {
      String dsString = dateService.iso8601DateFormat(testData[0].date);
      assertEquals(dsString, testData[0].iso8601DateString);
   }

   @Test
   public void testRfc822DateFormat() throws ExecutionException, InterruptedException {
      String dsString = dateService.rfc822DateFormat(testData[0].date);
      assertEquals(dsString, testData[0].rfc822DateString);
   }

   @Test
   void testIso8601DateFormatResponseTime() throws ExecutionException, InterruptedException {
      for (int i = 0; i < LOOP_COUNT; i++)
         dateService.iso8601DateFormat();
   }

   @Test
   void testRfc822DateFormatResponseTime() throws ExecutionException, InterruptedException {
      for (int i = 0; i < LOOP_COUNT; i++)
         dateService.rfc822DateFormat();
   }

   @Test
   void testFormatIso8601DateCorrectnessInParallel() throws Throwable {
      List<Runnable> tasks = new ArrayList<Runnable>(testData.length);
      for (final TestData myData : testData) {
         tasks.add(new Runnable() {
            public void run() {
               String dsString = dateService.iso8601DateFormat(myData.date);
               assertEquals(dsString, myData.iso8601DateString);
            }
         });
      }
      executeMultiThreadedCorrectnessTest(tasks);
   }

   @Test
   void testFormatIso8601DatePerformanceInParallel() throws Throwable {
      List<Runnable> tasks = new ArrayList<Runnable>(testData.length);
      for (final TestData myData : testData) {
         tasks.add(new Runnable() {
            public void run() {
               dateService.iso8601DateFormat(myData.date);
            }
         });
      }
      executeMultiThreadedPerformanceTest("testFormatIso8601DatePerformanceInParallel", tasks);
   }

   @Test
   void testFormatIso8601DatePerformanceInParallel_SdfAlternative() throws Throwable {
      List<Runnable> tasks = new ArrayList<Runnable>(testData.length);
      for (final TestData myData : testData) {
         tasks.add(new Runnable() {
            public void run() {
               dateService.sdfIso8601DateFormat(myData.date);
            }
         });
      }
      executeMultiThreadedPerformanceTest(
               "testFormatIso8601DatePerformanceInParallel_SdfAlternative", tasks);
   }

   @Test
   void testFormatAmazonDatePerformanceInParallel() throws Throwable {
      List<Runnable> tasks = new ArrayList<Runnable>(testData.length);
      tasks.add(new Runnable() {
         public void run() {
            AWSAuthConnection.httpDate();
         }
      });
      executeMultiThreadedPerformanceTest("testFormatAmazonDatePerformanceInParallel", tasks);
   }

   @Test
   void testParseIso8601DateSerialResponseTime() throws ExecutionException, InterruptedException {
      for (int i = 0; i < LOOP_COUNT; i++)
         dateService.iso8601DateParse(testData[0].iso8601DateString);
   }

   @Test
   void testParseIso8601DateSerialResponseTime_JodaAlternative() throws ExecutionException,
            InterruptedException {
      for (int i = 0; i < LOOP_COUNT; i++)
         dateService.jodaIso8601DateParse(testData[0].iso8601DateString);
   }

public class DateServiceTest extends org.jclouds.aws.util.DateServiceTest {

   @Test
   void testAmazonParseDateSerialResponseTime() {
@@ -193,44 +44,14 @@ public class DateServiceTest extends PerformanceTest {
   }

   @Test
   void testParseIso8601DateCorrectnessInParallel() throws Throwable {
      List<Runnable> tasks = new ArrayList<Runnable>(testData.length);
      for (final TestData myData : testData) {
         tasks.add(new Runnable() {
            public void run() {
               DateTime dsDate = dateService.iso8601DateParse(myData.iso8601DateString);
               assertEquals(dsDate, myData.date);
            }
         });
      }
      executeMultiThreadedCorrectnessTest(tasks);
   }

   @Test
   void testParseIso8601DatePerformanceInParallel() throws Throwable {
      List<Runnable> tasks = new ArrayList<Runnable>(testData.length);
      for (final TestData myData : testData) {
         tasks.add(new Runnable() {
            public void run() {
               dateService.iso8601DateParse(myData.iso8601DateString);
            }
         });
      }
      executeMultiThreadedPerformanceTest("testParseIso8601DatePerformanceInParallel", tasks);
   }

   @Test
   void testParseIso8601DatePerformanceInParallel_JodaAlternative() throws Throwable {
      List<Runnable> tasks = new ArrayList<Runnable>(testData.length);
      for (final TestData myData : testData) {
         tasks.add(new Runnable() {
            public void run() {
               dateService.jodaIso8601DateParse(myData.iso8601DateString);
            }
         });
      }
      executeMultiThreadedPerformanceTest(
               "testParseIso8601DatePerformanceInParallel_JodaAlternative", tasks);

   void testFormatAmazonDatePerformanceInParallel() throws Throwable {
      List<Runnable> tasks = new ArrayList<Runnable>(testData.length);
      tasks.add(new Runnable() {
         public void run() {
            AWSAuthConnection.httpDate();
         }
      });
      executeMultiThreadedPerformanceTest("testFormatAmazonDatePerformanceInParallel", tasks);
   }

}
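The parallel tests above hand a list of Runnables to a harness inherited from the PerformanceTest base class. A plausible shape for such a harness, stated as an assumption since the real base class is not part of this diff:

import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

public class ParallelRunner {
   public static void runAll(List<Runnable> tasks) throws InterruptedException {
      // run every task on a small pool and wait for completion
      ExecutorService pool = Executors.newFixedThreadPool(Math.max(1, Math.min(tasks.size(), 8)));
      for (Runnable task : tasks)
         pool.submit(task);
      pool.shutdown();
      pool.awaitTermination(2, TimeUnit.MINUTES); // mirrors the 2 * 60 * 1000 timeOut above
   }
}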
@@ -43,6 +43,20 @@
      <module>extensions</module>
      <module>samples</module>
   </modules>
   <dependencies>
      <dependency>
         <groupId>${project.groupId}</groupId>
         <artifactId>jclouds-aws-core</artifactId>
         <version>${project.version}</version>
      </dependency>
      <dependency>
         <groupId>${project.groupId}</groupId>
         <artifactId>jclouds-aws-core</artifactId>
         <version>${project.version}</version>
         <type>test-jar</type>
         <scope>test</scope>
      </dependency>
   </dependencies>
   <build>
      <plugins>
         <plugin>
@@ -24,9 +24,9 @@
package org.jclouds.http.options;

import java.util.Collection;
import java.util.Iterator;
import java.util.SortedMap;
import java.util.TreeMap;
import java.util.Map.Entry;

import com.google.common.collect.HashMultimap;
@@ -40,50 +40,47 @@ import com.google.common.collect.Multimap;
 */
public class BaseHttpRequestOptions implements HttpRequestOptions {

   protected SortedMap<String, String> parameters = new TreeMap<String, String>();
   protected Multimap<String, String> headers = HashMultimap.create();
   protected String payload;

   public String buildPayload() {
      return payload;
   }

   protected String getFirstHeaderOrNull(String string) {
      Collection<String> values = headers.get(string);
      return (values != null && values.size() >= 1) ? values.iterator().next() : null;
   }

   protected void replaceHeader(String key, String value) {
      headers.removeAll(key);
      headers.put(key, value);
   }

   /**
    * {@inheritDoc}
    */
   public Multimap<String, String> buildRequestHeaders() {
      return headers;
   }

   /**
    * {@inheritDoc}
    */
   public String buildQueryString() {
      StringBuilder builder = new StringBuilder("");
      if (parameters.size() > 0) {
         builder.append("?");
         for (Iterator<Entry<String, String>> i = parameters.entrySet().iterator(); i.hasNext();) {
            Entry<String, String> entry = i.next();
            builder.append(entry.getKey()).append("=").append(entry.getValue());
            if (i.hasNext())
               builder.append("&");
         }
      }
      String returnVal = builder.toString();
      return returnVal;
   }

}
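To see what buildQueryString() above produces, a hypothetical subclass (illustration only, not part of the commit) that exposes the protected parameters map:

import org.jclouds.http.options.BaseHttpRequestOptions;

public class ExampleOptions extends BaseHttpRequestOptions {
   public ExampleOptions with(String key, String value) {
      parameters.put(key, value);
      return this;
   }

   public static void main(String[] args) {
      String query = new ExampleOptions().with("prefix", "photos/").with("max-keys", "10")
               .buildQueryString();
      System.out.println(query); // ?max-keys=10&prefix=photos/ (the TreeMap sorts the keys)
   }
}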