updated perftest to use amazon sdk and also stop using httpnio

Adrian Cole 2010-07-21 11:50:06 -07:00
parent 7c2b014f25
commit 5bd3eb0571
29 changed files with 303 additions and 4292 deletions

aws/perftest/README.txt (new file)

@@ -0,0 +1,38 @@
This sample uses the Google App Engine for Java SDK located at http://googleappengine.googlecode.com/files/appengine-java-sdk-1.3.5.zip
Please unzip the above file and modify your Maven settings.xml as shown below before attempting to run 'mvn -Plive install'
<profile>
<id>appengine</id>
<activation>
<activeByDefault>true</activeByDefault>
</activation>
<properties>
<appengine.home>/path/to/appengine-java-sdk-1.3.5</appengine.home>
</properties>
</profile>
<profile>
<id>aws</id>
<activation>
<activeByDefault>true</activeByDefault>
</activation>
<properties>
<jclouds.aws.accesskeyid>YOUR_ACCESS_KEY_ID</jclouds.aws.accesskeyid>
<jclouds.aws.secretaccesskey>YOUR_SECRET_KEY</jclouds.aws.secretaccesskey>
</properties>
</profile>
<repositories>
<repository>
<id>jclouds</id>
<url>http://jclouds.googlecode.com/svn/trunk/repo</url>
</repository>
<repository>
<id>jclouds-rimu-snapshots-nexus</id>
<url>http://jclouds.rimuhosting.com:8081/nexus/content/repositories/snapshots</url>
<snapshots>
<enabled>true</enabled>
</snapshots>
</repository>
</repositories>

aws/perftest/pom.xml (changed)

@@ -1,40 +1,24 @@
 <?xml version="1.0" encoding="UTF-8"?>
 <!--
     Copyright (C) 2009 Cloud Conscious, LLC. <info@cloudconscious.com>
     ====================================================================
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
     You may obtain a copy of the License at

     http://www.apache.org/licenses/LICENSE-2.0

     Unless required by applicable law or agreed to in writing, software
     distributed under the License is distributed on an "AS IS" BASIS,
     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     See the License for the specific language governing permissions and
     limitations under the License.
     ====================================================================
 -->
-<!--
-    Note that the code that these performance tests are evaluated against
-    exist in src/main. The code in that location was copied straight from
-    Amazon's website. There have been small modifications to make unit
-    testing possible. That code exists with the following license: This
-    software code is made available "AS IS" without warranties of any
-    kind. You may copy, display, modify and redistribute the software code
-    either by itself or as incorporated into your code; provided that you
-    do not remove any proprietary notices. Your use of this software code
-    is at your own risk and you waive any claim against Amazon Digital
-    Services, Inc. or its affiliates with respect to your use of this
-    software code. (c) 2006 Amazon Digital Services, Inc. or its
-    affiliates.
--->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+    xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
 <modelVersion>4.0.0</modelVersion>
 <parent>
     <groupId>org.jclouds</groupId>
@@ -42,8 +26,19 @@
     <version>1.0-SNAPSHOT</version>
 </parent>
 <artifactId>jclouds-aws-perftest</artifactId>
-<name>jclouds Performance test verses Amazon samples implementation</name>
-<description>Performance test verses Amazon samples implementation</description>
+<name>jclouds Performance test verses Amazon SDK implementation</name>
+<description>Performance test verses Amazon SDK implementation</description>
+<properties>
+    <!--
+        note you must set the property ${appengine.home} to a valid
+        extraction of appengine-java-sdk
+    -->
+    <appengine.home>YOUR_APPENGINE_HOME</appengine.home>
+    <jclouds.test.loopcount>100</jclouds.test.loopcount>
+    <jclouds.test.identity>${jclouds.aws.accesskeyid}</jclouds.test.identity>
+    <jclouds.test.credential>${jclouds.aws.secretaccesskey}</jclouds.test.credential>
+    <jclouds.test.initializer>org.jclouds.aws.s3.blobstore.integration.S3TestInitializer</jclouds.test.initializer>
+</properties>
 <dependencies>
     <dependency>
@@ -56,10 +51,15 @@
         <artifactId>jclouds-aws</artifactId>
         <version>${project.version}</version>
     </dependency>
+    <dependency>
+        <groupId>com.amazonaws</groupId>
+        <artifactId>aws-java-sdk</artifactId>
+        <version>1.0.007</version>
+    </dependency>
     <dependency>
         <groupId>net.java.dev.jets3t</groupId>
         <artifactId>jets3t</artifactId>
-        <version>0.7.1</version>
+        <version>0.7.4</version>
     </dependency>
     <dependency>
         <groupId>${project.groupId}</groupId>
@@ -68,7 +68,7 @@
     </dependency>
     <dependency>
         <groupId>${project.groupId}</groupId>
-        <artifactId>jclouds-httpnio</artifactId>
+        <artifactId>jclouds-apachehc</artifactId>
         <version>${project.version}</version>
     </dependency>
     <dependency>
@@ -76,17 +76,30 @@
         <artifactId>jclouds-gae</artifactId>
         <version>${project.version}</version>
     </dependency>
-    <!-- start gae dependencies -->
+    <dependency>
+        <groupId>${project.groupId}</groupId>
+        <artifactId>jclouds-aws</artifactId>
+        <version>${project.version}</version>
+        <type>test-jar</type>
+        <scope>test</scope>
+    </dependency>
+    <!-- start gae stuff -->
     <dependency>
         <groupId>com.google.appengine</groupId>
         <artifactId>appengine-api-stubs</artifactId>
-        <version>1.3.0</version>
+        <version>1.3.5</version>
+        <scope>test</scope>
+    </dependency>
+    <dependency>
+        <groupId>com.google.appengine</groupId>
+        <artifactId>appengine-testing</artifactId>
+        <version>1.3.5</version>
         <scope>test</scope>
     </dependency>
     <dependency>
         <groupId>com.google.appengine</groupId>
         <artifactId>appengine-local-runtime</artifactId>
-        <version>1.3.0</version>
+        <version>1.3.5</version>
         <scope>test</scope>
     </dependency>
     <dependency>
@@ -95,12 +108,10 @@
         <version>${project.version}</version>
         <type>test-jar</type>
         <scope>test</scope>
-        <exclusions>
-            <exclusion>
-                <groupId>org.mortbay.jetty</groupId>
-                <artifactId>jetty</artifactId>
-            </exclusion>
-        </exclusions>
+        <!--
+            <exclusions> <exclusion> <groupId>org.mortbay.jetty</groupId>
+            <artifactId>jetty</artifactId> </exclusion> </exclusions>
+        -->
     </dependency>
     <dependency>
         <groupId>org.apache.geronimo.specs</groupId>
@@ -108,14 +119,7 @@
         <version>1.2</version>
         <scope>test</scope>
     </dependency>
-    <!-- end gae dependencies -->
-    <dependency>
-        <groupId>${project.groupId}</groupId>
-        <artifactId>jclouds-aws</artifactId>
-        <version>${project.version}</version>
-        <type>test-jar</type>
-        <scope>test</scope>
-    </dependency>
 </dependencies>
 <scm>
@@ -137,6 +141,24 @@
                 </goals>
                 <configuration>
                     <threadCount>1</threadCount>
+                    <systemProperties>
+                        <property>
+                            <name>jclouds.test.identity</name>
+                            <value>${jclouds.test.identity}</value>
+                        </property>
+                        <property>
+                            <name>jclouds.test.credential</name>
+                            <value>${jclouds.test.credential}</value>
+                        </property>
+                        <property>
+                            <name>jclouds.test.initializer</name>
+                            <value>${jclouds.test.initializer}</value>
+                        </property>
+                        <property>
+                            <name>jclouds.test.loopcount</name>
+                            <value>${jclouds.test.loopcount}</value>
+                        </property>
+                    </systemProperties>
                 </configuration>
             </execution>
         </executions>
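The aws-java-sdk dependency added above replaces the hand-rolled com.amazon.s3 sample client that this commit deletes (see the removed files below). As a rough illustration only, the following sketch shows what an SDK-based put/delete loop could look like when it reads the identity, credential, and loop count from the jclouds.test.* system properties that the surefire configuration above passes through; the class name, bucket naming, and timing logic are illustrative assumptions, not code from this commit.

import java.io.ByteArrayInputStream;

import com.amazonaws.auth.BasicAWSCredentials;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3Client;
import com.amazonaws.services.s3.model.ObjectMetadata;

public class AwsSdkPerfSketch {
   public static void main(String[] args) throws Exception {
      // surefire passes these in via the <systemProperties> block above
      String identity = System.getProperty("jclouds.test.identity");
      String credential = System.getProperty("jclouds.test.credential");
      int loopCount = Integer.parseInt(System.getProperty("jclouds.test.loopcount", "100"));

      AmazonS3 s3 = new AmazonS3Client(new BasicAWSCredentials(identity, credential));
      String bucket = identity.toLowerCase() + "-perftest"; // hypothetical bucket name

      s3.createBucket(bucket);
      byte[] payload = "this is a test".getBytes("UTF-8");
      ObjectMetadata metadata = new ObjectMetadata();
      metadata.setContentType("text/plain");
      metadata.setContentLength(payload.length);

      // time the put loop, mirroring the loopcount-driven style of the perftest
      long start = System.currentTimeMillis();
      for (int i = 0; i < loopCount; i++)
         s3.putObject(bucket, "file" + i, new ByteArrayInputStream(payload), metadata);
      System.out.println(loopCount + " puts took " + (System.currentTimeMillis() - start) + "ms");

      for (int i = 0; i < loopCount; i++)
         s3.deleteObject(bucket, "file" + i);
      s3.deleteBucket(bucket);
   }
}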

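The switch from jclouds-httpnio to jclouds-apachehc means the perf test now drives jclouds over the Apache HttpClient engine rather than the removed NIO one. A minimal sketch of wiring that driver into a BlobStoreContext follows; the provider id "s3", the module class name, and the container/blob names are assumptions based on the jclouds artifacts referenced in this pom, so treat it as an outline rather than code from this commit.

import org.jclouds.blobstore.BlobStore;
import org.jclouds.blobstore.BlobStoreContext;
import org.jclouds.blobstore.BlobStoreContextFactory;
import org.jclouds.blobstore.domain.Blob;
import org.jclouds.http.apachehc.config.ApacheHCHttpCommandExecutorServiceModule;

import com.google.common.collect.ImmutableSet;
import com.google.inject.Module;

public class JcloudsApacheHCSketch {
   public static void main(String[] args) {
      String identity = System.getProperty("jclouds.test.identity");
      String credential = System.getProperty("jclouds.test.credential");

      // request the Apache HttpClient driver instead of the removed httpnio one
      BlobStoreContext context = new BlobStoreContextFactory().createContext("s3", identity,
               credential, ImmutableSet.<Module> of(new ApacheHCHttpCommandExecutorServiceModule()));
      try {
         BlobStore blobStore = context.getBlobStore();
         blobStore.createContainerInLocation(null, "perftest-container");
         Blob blob = blobStore.newBlob("testkey");
         blob.setPayload("this is a test");
         blobStore.putBlob("perftest-container", blob);
      } finally {
         context.close();
      }
   }
}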
S3Driver.java (file deleted)

@@ -1,159 +0,0 @@
// This software code is made available "AS IS" without warranties of any
// kind. You may copy, display, modify and redistribute the software
// code either by itself or as incorporated into your code; provided that
// you do not remove any proprietary notices. Your use of this software
// code is at your own risk and you waive any claim against Amazon
// Digital Services, Inc. or its affiliates with respect to your use of
// this software code. (c) 2006-2007 Amazon Digital Services, Inc. or its
// affiliates.
import java.util.Map;
import java.util.TreeMap;
import java.util.Arrays;
import com.amazon.s3.AWSAuthConnection;
import com.amazon.s3.CallingFormat;
import com.amazon.s3.QueryStringAuthGenerator;
import com.amazon.s3.S3Object;
public class S3Driver {
static final String awsAccessKeyId = "<INSERT YOUR AWS ACCESS KEY ID HERE>";
static final String awsSecretAccessKey = "<INSERT YOUR AWS SECRET ACCESS KEY HERE>";
// convert the bucket to lowercase for vanity domains
// the bucket name must be lowercase since DNS is case-insensitive
static final String bucketName = awsAccessKeyId.toLowerCase() + "-test-bucket";
static final String keyName = "test-key";
static final String copiedKeyName = "copy-of-" + keyName;
public static void main(String args[]) throws Exception {
if (awsAccessKeyId.startsWith("<INSERT")) {
System.err.println("Please examine S3Driver.java and update it with your credentials");
System.exit(-1);
}
AWSAuthConnection conn =
new AWSAuthConnection(awsAccessKeyId, awsSecretAccessKey);
QueryStringAuthGenerator generator =
new QueryStringAuthGenerator(awsAccessKeyId, awsSecretAccessKey);
// Check if the bucket exists. The high availability engineering of
// Amazon S3 is focused on get, put, list, and delete operations.
// Because bucket operations work against a centralized, global
// resource space, it is not appropriate to make bucket create or
// delete calls on the high availability code path of your application.
// It is better to create or delete buckets in a separate initialization
// or setup routine that you run less often.
if (!conn.checkBucketExists(bucketName))
{
System.out.println("----- creating bucket -----");
System.out.println(conn.createBucket(bucketName, AWSAuthConnection.LOCATION_DEFAULT, null).connection.getResponseMessage());
// sample creating an EU located bucket.
// (note path-style urls will not work with location-constrained buckets)
//System.out.println(conn.createBucket(bucketName, AWSAuthConnection.LOCATION_EU, null).connection.getResponseMessage());
}
System.out.println("----- listing bucket -----");
System.out.println(conn.listBucket(bucketName, null, null, null, null).entries);
System.out.println("----- bucket location -----");
System.out.println(conn.getBucketLocation(bucketName).getLocation());
System.out.println("----- putting object -----");
S3Object object = new S3Object("this is a test".getBytes(), null);
Map headers = new TreeMap();
headers.put("Content-Type", Arrays.asList(new String[] { "text/plain" }));
System.out.println(
conn.put(bucketName, keyName, object, headers).connection.getResponseMessage()
);
System.out.println("----- copying object -----");
// Straight Copy; destination key will be private.
conn.copy( bucketName, keyName, bucketName, copiedKeyName, null );
{
// Update the metadata; destination key will be private.
Map updateMetadata = new TreeMap();
updateMetadata.put("metadata-key", Arrays.asList("this will be the metadata in the copied key"));
conn.copy( bucketName, copiedKeyName, bucketName, copiedKeyName, updateMetadata, null );
}
System.out.println("----- listing bucket -----");
System.out.println(conn.listBucket(bucketName, null, null, null, null).entries);
System.out.println("----- getting object -----");
System.out.println(
new String(conn.get(bucketName, keyName, null).object.data)
);
System.out.println("----- query string auth example -----");
generator.setExpiresIn(60 * 1000);
System.out.println("Try this url in your web browser (it will only work for 60 seconds)\n");
System.out.println(generator.get(bucketName, keyName, null));
System.out.print("\npress enter> ");
System.in.read();
System.out.println("\nNow try just the url without the query string arguments. It should fail.\n");
System.out.println(generator.makeBareURL(bucketName, keyName));
System.out.print("\npress enter> ");
System.in.read();
System.out.println("----- putting object with metadata and public read acl -----");
Map metadata = new TreeMap();
metadata.put("blah", Arrays.asList(new String[] { "foo" }));
object = new S3Object("this is a publicly readable test".getBytes(), metadata);
headers = new TreeMap();
headers.put("x-amz-acl", Arrays.asList(new String[] { "public-read" }));
headers.put("Content-Type", Arrays.asList(new String[] { "text/plain" }));
System.out.println(
conn.put(bucketName, keyName + "-public", object, headers).connection.getResponseMessage()
);
System.out.println("----- anonymous read test -----");
System.out.println("\nYou should be able to try this in your browser\n");
System.out.println(generator.makeBareURL(bucketName, keyName + "-public"));
System.out.print("\npress enter> ");
System.in.read();
System.out.println("----- path style url example -----");
System.out.println("\nNon-location-constrained buckets can also be specified as part of the url path. (This was the original url style supported by S3.)");
System.out.println("\nTry this url out in your browser (it will only be valid for 60 seconds)\n");
generator.setCallingFormat(CallingFormat.getPathCallingFormat());
// could also have been done like this:
// generator = new QueryStringAuthGenerator(awsAccessKeyId, awsSecretAccessKey, true, Utils.DEFAULT_HOST, CallingFormat.getPathCallingFormat());
generator.setExpiresIn(60 * 1000);
System.out.println(generator.get(bucketName, keyName, null));
System.out.print("\npress enter> ");
System.in.read();
System.out.println("----- getting object's acl -----");
System.out.println(new String(conn.getACL(bucketName, keyName, null).object.data));
System.out.println("----- deleting objects -----");
System.out.println(
conn.delete(bucketName, copiedKeyName, null).connection.getResponseMessage()
);
System.out.println(
conn.delete(bucketName, keyName, null).connection.getResponseMessage()
);
System.out.println(
conn.delete(bucketName, keyName + "-public", null).connection.getResponseMessage()
);
System.out.println("----- listing bucket -----");
System.out.println(conn.listBucket(bucketName, null, null, null, null).entries);
System.out.println("----- listing all my buckets -----");
System.out.println(conn.listAllMyBuckets(null).entries);
System.out.println("----- deleting bucket -----");
System.out.println(
conn.deleteBucket(bucketName, null).connection.getResponseMessage()
);
}
}

com/amazon/s3/AWSAuthConnection.java (file deleted)

@@ -1,633 +0,0 @@
/**
*
* Copyright (C) 2009 Adrian Cole <adriancole@jclouds.org>
*
* ====================================================================
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
* ====================================================================
*/
// This software code is made available "AS IS" without warranties of any
// kind. You may copy, display, modify and redistribute the software
// code either by itself or as incorporated into your code; provided that
// you do not remove any proprietary notices. Your use of this software
// code is at your own risk and you waive any claim against Amazon
// Digital Services, Inc. or its affiliates with respect to your use of
// this software code. (c) 2006-2007 Amazon Digital Services, Inc. or its
// affiliates.
package com.amazon.s3;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
import java.text.SimpleDateFormat;
import java.io.IOException;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.TimeZone;
/**
* An interface into the S3 system. It is initially configured with
* authentication and connection parameters and exposes methods to access and
* manipulate S3 data.
*/
public class AWSAuthConnection {
public static final String LOCATION_DEFAULT = null;
public static final String LOCATION_EU = "EU";
private String awsAccessKeyId;
private String awsSecretAccessKey;
private boolean isSecure;
private String server;
private int port;
private CallingFormat callingFormat;
public AWSAuthConnection(String awsAccessKeyId, String awsSecretAccessKey) {
this(awsAccessKeyId, awsSecretAccessKey, true);
}
public AWSAuthConnection(String awsAccessKeyId, String awsSecretAccessKey, boolean isSecure) {
this(awsAccessKeyId, awsSecretAccessKey, isSecure, Utils.DEFAULT_HOST);
}
public AWSAuthConnection(String awsAccessKeyId, String awsSecretAccessKey, boolean isSecure,
String server)
{
this(awsAccessKeyId, awsSecretAccessKey, isSecure, server,
isSecure ? Utils.SECURE_PORT : Utils.INSECURE_PORT);
}
public AWSAuthConnection(String awsAccessKeyId, String awsSecretAccessKey, boolean isSecure,
String server, int port) {
this(awsAccessKeyId, awsSecretAccessKey, isSecure, server, port, CallingFormat.getSubdomainCallingFormat());
}
public AWSAuthConnection(String awsAccessKeyId, String awsSecretAccessKey, boolean isSecure,
String server, CallingFormat format) {
this(awsAccessKeyId, awsSecretAccessKey, isSecure, server,
isSecure ? Utils.SECURE_PORT : Utils.INSECURE_PORT,
format);
}
/**
* Create a new interface to interact with S3 with the given credential and connection
* parameters
*
* @param awsAccessKeyId Your user key into AWS
* @param awsSecretAccessKey The secret string used to generate signatures for authentication.
* @param isSecure use SSL encryption
* @param server Which host to connect to. Usually, this will be s3.amazonaws.com
* @param port Which port to use.
* @param callingFormat Type of request Regular/Vanity or Pure Vanity domain
*/
public AWSAuthConnection(String awsAccessKeyId, String awsSecretAccessKey, boolean isSecure,
String server, int port, CallingFormat format)
{
this.awsAccessKeyId = awsAccessKeyId;
this.awsSecretAccessKey = awsSecretAccessKey;
this.isSecure = isSecure;
this.server = server;
this.port = port;
this.callingFormat = format;
}
/**
* Creates a new bucket.
* @param bucket The name of the bucket to create.
* @param headers A Map of String to List of Strings representing the http
* headers to pass (can be null).
* @param metadata A Map of String to List of Strings representing the s3
* metadata for this bucket (can be null).
* @deprecated use version that specifies location
*/
public Response createBucket(String bucket, Map headers)
throws MalformedURLException, IOException
{
return createBucket(bucket, null, headers);
}
/**
* Creates a new bucket.
* @param bucket The name of the bucket to create.
* @param location Desired location ("EU") (or null for default).
* @param headers A Map of String to List of Strings representing the http
* headers to pass (can be null).
* @param metadata A Map of String to List of Strings representing the s3
* metadata for this bucket (can be null).
* @throws IllegalArgumentException on invalid location
*/
public Response createBucket(String bucket, String location, Map headers)
throws MalformedURLException, IOException
{
String body;
if (location == null) {
body = null;
} else if (LOCATION_EU.equals(location)) {
if (!callingFormat.supportsLocatedBuckets())
throw new IllegalArgumentException("Creating location-constrained bucket with unsupported calling-format");
body = "<CreateBucketConstraint><LocationConstraint>" + location + "</LocationConstraint></CreateBucketConstraint>";
} else
throw new IllegalArgumentException("Invalid Location: "+location);
// validate bucket name
if (!Utils.validateBucketName(bucket, callingFormat, location != null))
throw new IllegalArgumentException("Invalid S3Bucket Name: "+bucket);
HttpURLConnection request = makeRequest("PUT", bucket, "", null, headers);
if (body != null)
{
request.setDoOutput(true);
request.getOutputStream().write(body.getBytes("UTF-8"));
}
return new Response(request);
}
/**
* Check if the specified bucket exists (via a HEAD request)
* @param bucket The name of the bucket to check
* @return true if HEAD access returned success
*/
public boolean checkBucketExists(String bucket) throws MalformedURLException, IOException
{
HttpURLConnection response = makeRequest("HEAD", bucket, "", null, null);
int httpCode = response.getResponseCode();
return httpCode >= 200 && httpCode < 300;
}
/**
* Lists the contents of a bucket.
* @param bucket The name of the bucket to create.
* @param prefix All returned keys will start with this string (can be null).
* @param marker All returned keys will be lexographically greater than
* this string (can be null).
* @param maxKeys The maximum number of keys to return (can be null).
* @param headers A Map of String to List of Strings representing the http
* headers to pass (can be null).
*/
public ListBucketResponse listBucket(String bucket, String prefix, String marker,
Integer maxKeys, Map headers)
throws MalformedURLException, IOException
{
return listBucket(bucket, prefix, marker, maxKeys, null, headers);
}
/**
* Lists the contents of a bucket.
* @param bucket The name of the bucket to list.
* @param prefix All returned keys will start with this string (can be null).
* @param marker All returned keys will be lexographically greater than
* this string (can be null).
* @param maxKeys The maximum number of keys to return (can be null).
* @param delimiter Keys that contain a string between the prefix and the first
* occurrence of the delimiter will be rolled up into a single element.
* @param headers A Map of String to List of Strings representing the http
* headers to pass (can be null).
*/
public ListBucketResponse listBucket(String bucket, String prefix, String marker,
Integer maxKeys, String delimiter, Map headers)
throws MalformedURLException, IOException
{
Map pathArgs = Utils.paramsForListOptions(prefix, marker, maxKeys, delimiter);
return new ListBucketResponse(makeRequest("GET", bucket, "", pathArgs, headers));
}
/**
* Deletes a bucket.
* @param bucket The name of the bucket to delete.
* @param headers A Map of String to List of Strings representing the http
* headers to pass (can be null).
*/
public Response deleteBucket(String bucket, Map headers)
throws MalformedURLException, IOException
{
return new Response(makeRequest("DELETE", bucket, "", null, headers));
}
/**
* Writes an object to S3.
* @param bucket The name of the bucket to which the object will be added.
* @param key The name of the key to use.
* @param object An S3Object containing the data to write.
* @param headers A Map of String to List of Strings representing the http
* headers to pass (can be null).
*/
public Response put(String bucket, String key, S3Object object, Map headers)
throws MalformedURLException, IOException
{
HttpURLConnection request =
makeRequest("PUT", bucket, Utils.urlencode(key), null, headers, object);
request.setDoOutput(true);
request.getOutputStream().write(object.data == null ? new byte[] {} : object.data);
return new Response(request);
}
/**
* Creates a copy of an existing S3 Object. In this signature, we will copy the
* existing metadata. The default access control policy is private; if you want
* to override it, please use x-amz-acl in the headers.
* @param sourceBucket The name of the bucket where the source object lives.
* @param sourceKey The name of the key to copy.
* @param destinationBucket The name of the bucket to which the object will be added.
* @param destinationKey The name of the key to use.
* @param headers A Map of String to List of Strings representing the http
* headers to pass (can be null). You may wish to set the x-amz-acl header appropriately.
*/
public Response copy( String sourceBucket, String sourceKey, String destinationBucket, String destinationKey, Map headers )
throws MalformedURLException, IOException
{
S3Object object = new S3Object(new byte[] {}, new HashMap());
headers = headers == null ? new HashMap() : new HashMap(headers);
headers.put("x-amz-copy-source", Arrays.asList( new String[] { sourceBucket + "/" + sourceKey } ) );
headers.put("x-amz-metadata-directive", Arrays.asList( new String[] { "COPY" } ) );
return verifyCopy( put( destinationBucket, destinationKey, object, headers ) );
}
/**
* Creates a copy of an existing S3 Object. In this signature, we will replace the
* existing metadata. The default access control policy is private; if you want
* to override it, please use x-amz-acl in the headers.
* @param sourceBucket The name of the bucket where the source object lives.
* @param sourceKey The name of the key to copy.
* @param destinationBucket The name of the bucket to which the object will be added.
* @param destinationKey The name of the key to use.
* @param metadata A Map of String to List of Strings representing the S3 metadata
* for the new object.
* @param headers A Map of String to List of Strings representing the http
* headers to pass (can be null). You may wish to set the x-amz-acl header appropriately.
*/
public Response copy( String sourceBucket, String sourceKey, String destinationBucket, String destinationKey, Map metadata, Map headers )
throws MalformedURLException, IOException
{
S3Object object = new S3Object(new byte[] {}, metadata);
headers = headers == null ? new HashMap() : new HashMap(headers);
headers.put("x-amz-copy-source", Arrays.asList( new String[] { sourceBucket + "/" + sourceKey } ) );
headers.put("x-amz-metadata-directive", Arrays.asList( new String[] { "REPLACE" } ) );
return verifyCopy( put( destinationBucket, destinationKey, object, headers ) );
}
/**
* Copy sometimes returns a successful response and starts to send whitespace
* characters to us. This method processes those whitespace characters and
* will throw an exception if the response is either unknown or an error.
* @param response Response object from the PUT request.
* @return The response with the input stream drained.
* @throws IOException If anything goes wrong.
*/
private Response verifyCopy( Response response ) throws IOException {
if (response.connection.getResponseCode() < 400) {
byte[] body = GetResponse.slurpInputStream(response.connection.getInputStream());
String message = new String( body );
if ( message.indexOf( "<Error" ) != -1 ) {
throw new IOException( message.substring( message.indexOf( "<Error" ) ) );
} else if ( message.indexOf( "</CopyObjectResult>" ) != -1 ) {
// It worked!
} else {
throw new IOException( "Unexpected response: " + message );
}
}
return response;
}
/**
* Reads an object from S3.
* @param bucket The name of the bucket where the object lives.
* @param key The name of the key to use.
* @param headers A Map of String to List of Strings representing the http
* headers to pass (can be null).
*/
public GetResponse get(String bucket, String key, Map headers)
throws MalformedURLException, IOException
{
return new GetResponse(makeRequest("GET", bucket, Utils.urlencode(key), null, headers));
}
/**
* Deletes an object from S3.
* @param bucket The name of the bucket where the object lives.
* @param key The name of the key to use.
* @param headers A Map of String to List of Strings representing the http
* headers to pass (can be null).
*/
public Response delete(String bucket, String key, Map headers)
throws MalformedURLException, IOException
{
return new Response(makeRequest("DELETE", bucket, Utils.urlencode(key), null, headers));
}
/**
* Get the requestPayment xml document for a given bucket
* @param bucket The name of the bucket
* @param headers A Map of String to List of Strings representing the http
* headers to pass (can be null).
*/
public GetResponse getBucketRequestPayment(String bucket, Map headers)
throws MalformedURLException, IOException
{
Map pathArgs = new HashMap();
pathArgs.put("requestPayment", null);
return new GetResponse(makeRequest("GET", bucket, "", pathArgs, headers));
}
/**
* Write a new requestPayment xml document for a given bucket
* @param loggingXMLDoc The xml representation of the requestPayment configuration as a String
* @param bucket The name of the bucket
* @param headers A Map of String to List of Strings representing the http
* headers to pass (can be null).
*/
public Response putBucketRequestPayment(String bucket, String requestPaymentXMLDoc, Map headers)
throws MalformedURLException, IOException
{
Map pathArgs = new HashMap();
pathArgs.put("requestPayment", null);
S3Object object = new S3Object(requestPaymentXMLDoc.getBytes(), null);
HttpURLConnection request = makeRequest("PUT", bucket, "", pathArgs, headers, object);
request.setDoOutput(true);
request.getOutputStream().write(object.data == null ? new byte[] {} : object.data);
return new Response(request);
}
/**
* Get the logging xml document for a given bucket
* @param bucket The name of the bucket
* @param headers A Map of String to List of Strings representing the http
* headers to pass (can be null).
*/
public GetResponse getBucketLogging(String bucket, Map headers)
throws MalformedURLException, IOException
{
Map pathArgs = new HashMap();
pathArgs.put("logging", null);
return new GetResponse(makeRequest("GET", bucket, "", pathArgs, headers));
}
/**
* Write a new logging xml document for a given bucket
* @param loggingXMLDoc The xml representation of the logging configuration as a String
* @param bucket The name of the bucket
* @param headers A Map of String to List of Strings representing the http
* headers to pass (can be null).
*/
public Response putBucketLogging(String bucket, String loggingXMLDoc, Map headers)
throws MalformedURLException, IOException
{
Map pathArgs = new HashMap();
pathArgs.put("logging", null);
S3Object object = new S3Object(loggingXMLDoc.getBytes(), null);
HttpURLConnection request = makeRequest("PUT", bucket, "", pathArgs, headers, object);
request.setDoOutput(true);
request.getOutputStream().write(object.data == null ? new byte[] {} : object.data);
return new Response(request);
}
/**
* Get the ACL for a given bucket
* @param bucket The name of the bucket where the object lives.
* @param headers A Map of String to List of Strings representing the http
* headers to pass (can be null).
*/
public GetResponse getBucketACL(String bucket, Map headers)
throws MalformedURLException, IOException
{
return getACL(bucket, "", headers);
}
/**
* Get the ACL for a given object (or bucket, if key is null).
* @param bucket The name of the bucket where the object lives.
* @param key The name of the key to use.
* @param headers A Map of String to List of Strings representing the http
* headers to pass (can be null).
*/
public GetResponse getACL(String bucket, String key, Map headers)
throws MalformedURLException, IOException
{
if (key == null) key = "";
Map pathArgs = new HashMap();
pathArgs.put("acl", null);
return new GetResponse(
makeRequest("GET", bucket, Utils.urlencode(key), pathArgs, headers)
);
}
/**
* Write a new ACL for a given bucket
* @param aclXMLDoc The xml representation of the ACL as a String
* @param bucket The name of the bucket where the object lives.
* @param headers A Map of String to List of Strings representing the http
* headers to pass (can be null).
*/
public Response putBucketACL(String bucket, String aclXMLDoc, Map headers)
throws MalformedURLException, IOException
{
return putACL(bucket, "", aclXMLDoc, headers);
}
/**
* Write a new ACL for a given object
* @param aclXMLDoc The xml representation of the ACL as a String
* @param bucket The name of the bucket where the object lives.
* @param key The name of the key to use.
* @param headers A Map of String to List of Strings representing the http
* headers to pass (can be null).
*/
public Response putACL(String bucket, String key, String aclXMLDoc, Map headers)
throws MalformedURLException, IOException
{
S3Object object = new S3Object(aclXMLDoc.getBytes(), null);
Map pathArgs = new HashMap();
pathArgs.put("acl", null);
HttpURLConnection request =
makeRequest("PUT", bucket, Utils.urlencode(key), pathArgs, headers, object);
request.setDoOutput(true);
request.getOutputStream().write(object.data == null ? new byte[] {} : object.data);
return new Response(request);
}
public LocationResponse getBucketLocation(String bucket)
throws MalformedURLException, IOException
{
Map pathArgs = new HashMap();
pathArgs.put("location", null);
return new LocationResponse(makeRequest("GET", bucket, "", pathArgs, null));
}
/**
* List all the buckets created by this account.
* @param headers A Map of String to List of Strings representing the http
* headers to pass (can be null).
*/
public ListAllMyBucketsResponse listAllMyBuckets(Map headers)
throws MalformedURLException, IOException
{
return new ListAllMyBucketsResponse(makeRequest("GET", "", "", null, headers));
}
/**
* Make a new HttpURLConnection without passing an S3Object parameter.
* Use this method for key operations that do require arguments
* @param method The method to invoke
* @param bucketName the bucket this request is for
* @param key the key this request is for
* @param pathArgs the
* @param headers
* @return
* @throws MalformedURLException
* @throws IOException
*/
private HttpURLConnection makeRequest(String method, String bucketName, String key, Map pathArgs, Map headers)
throws MalformedURLException, IOException
{
return makeRequest(method, bucketName, key, pathArgs, headers, null);
}
/**
* Make a new HttpURLConnection.
* @param method The HTTP method to use (GET, PUT, DELETE)
* @param bucketName The bucket name this request affects
* @param key The key this request is for
* @param pathArgs parameters if any to be sent along this request
* @param headers A Map of String to List of Strings representing the http
* headers to pass (can be null).
* @param object The S3Object that is to be written (can be null).
*/
private HttpURLConnection makeRequest(String method, String bucket, String key, Map pathArgs, Map headers,
S3Object object)
throws MalformedURLException, IOException
{
CallingFormat callingFormat = Utils.getCallingFormatForBucket( this.callingFormat, bucket );
if ( isSecure && callingFormat != CallingFormat.getPathCallingFormat() && bucket.indexOf( "." ) != -1 ) {
System.err.println( "You are making an SSL connection, however, the bucket contains periods and the wildcard certificate will not match by default. Please consider using HTTP." );
}
// build the domain based on the calling format
URL url = callingFormat.getURL(isSecure, server, this.port, bucket, key, pathArgs);
HttpURLConnection connection = (HttpURLConnection)url.openConnection();
connection.setRequestMethod(method);
// subdomain-style urls may encounter http redirects.
// Ensure that redirects are supported.
if (!connection.getInstanceFollowRedirects()
&& callingFormat.supportsLocatedBuckets())
throw new RuntimeException("HTTP redirect support required.");
addHeaders(connection, headers);
if (object != null) addMetadataHeaders(connection, object.metadata);
addAuthHeader(connection, method, bucket, key, pathArgs);
return connection;
}
/**
* Add the given headers to the HttpURLConnection.
* @param connection The HttpURLConnection to which the headers will be added.
* @param headers A Map of String to List of Strings representing the http
* headers to pass (can be null).
*/
private void addHeaders(HttpURLConnection connection, Map headers) {
addHeaders(connection, headers, "");
}
/**
* Add the given metadata fields to the HttpURLConnection.
* @param connection The HttpURLConnection to which the headers will be added.
* @param metadata A Map of String to List of Strings representing the s3
* metadata for this resource.
*/
private void addMetadataHeaders(HttpURLConnection connection, Map metadata) {
addHeaders(connection, metadata, Utils.METADATA_PREFIX);
}
/**
* Add the given headers to the HttpURLConnection with a prefix before the keys.
* @param connection The HttpURLConnection to which the headers will be added.
* @param headers A Map of String to List of Strings representing the http
* headers to pass (can be null).
* @param prefix The string to prepend to each key before adding it to the connection.
*/
private void addHeaders(HttpURLConnection connection, Map headers, String prefix) {
if (headers != null) {
for (Iterator i = headers.keySet().iterator(); i.hasNext(); ) {
String key = (String)i.next();
for (Iterator j = ((List)headers.get(key)).iterator(); j.hasNext(); ) {
String value = (String)j.next();
connection.addRequestProperty(prefix + key, value);
}
}
}
}
/**
* Add the appropriate Authorization header to the HttpURLConnection.
* @param connection The HttpURLConnection to which the header will be added.
* @param method The HTTP method to use (GET, PUT, DELETE)
* @param bucket the bucket name this request is for
* @param key the key this request is for
* @param pathArgs path arguments which are part of this request
*/
private void addAuthHeader(HttpURLConnection connection, String method, String bucket, String key, Map pathArgs) {
if (connection.getRequestProperty("Date") == null) {
connection.setRequestProperty("Date", httpDate());
}
if (connection.getRequestProperty("Content-Type") == null) {
connection.setRequestProperty("Content-Type", "");
}
String canonicalString =
Utils.makeCanonicalString(method, bucket, key, pathArgs, connection.getRequestProperties());
String encodedCanonical = Utils.encode(this.awsSecretAccessKey, canonicalString, false);
connection.setRequestProperty("Authorization",
"AWS " + this.awsAccessKeyId + ":" + encodedCanonical);
}
/**
* Generate an rfc822 date for use in the Date HTTP header.
*/
public static String httpDate() {
final String DateFormat = "EEE, dd MMM yyyy HH:mm:ss ";
SimpleDateFormat format = new SimpleDateFormat( DateFormat, Locale.US );
format.setTimeZone( TimeZone.getTimeZone( "GMT" ) );
return format.format( new Date() ) + "GMT";
}
}

com/amazon/s3/Bucket.java (file deleted)

@@ -1,41 +0,0 @@
// This software code is made available "AS IS" without warranties of any
// kind. You may copy, display, modify and redistribute the software
// code either by itself or as incorporated into your code; provided that
// you do not remove any proprietary notices. Your use of this software
// code is at your own risk and you waive any claim against Amazon
// Digital Services, Inc. or its affiliates with respect to your use of
// this software code. (c) 2006 Amazon Digital Services, Inc. or its
// affiliates.
package com.amazon.s3;
import java.util.Date;
/**
* A class representing a single bucket. Returned as a component of ListAllMyBucketsResponse.
*/
public class Bucket {
/**
* The name of the bucket.
*/
public String name;
/**
* The bucket's creation date.
*/
public Date creationDate;
public Bucket() {
this.name = null;
this.creationDate = null;
}
public Bucket(String name, Date creationDate) {
this.name = name;
this.creationDate = creationDate;
}
public String toString() {
return this.name;
}
}

com/amazon/s3/CallingFormat.java (file deleted)

@@ -1,103 +0,0 @@
// This software code is made available "AS IS" without warranties of any
// kind. You may copy, display, modify and redistribute the software
// code either by itself or as incorporated into your code; provided that
// you do not remove any proprietary notices. Your use of this software
// code is at your own risk and you waive any claim against Amazon
// Digital Services, Inc. or its affiliates with respect to your use of
// this software code. (c) 2006-2007 Amazon Digital Services, Inc. or its
// affiliates.
package com.amazon.s3;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Map;
public abstract class CallingFormat {
protected static CallingFormat pathCallingFormat = new PathCallingFormat();
protected static CallingFormat subdomainCallingFormat = new SubdomainCallingFormat();
protected static CallingFormat vanityCallingFormat = new VanityCallingFormat();
public abstract boolean supportsLocatedBuckets();
public abstract String getEndpoint(String server, int port, String bucket);
public abstract String getPathBase(String bucket, String key);
public abstract URL getURL (boolean isSecure, String server, int port, String bucket, String key, Map pathArgs)
throws MalformedURLException;
public static CallingFormat getPathCallingFormat() {
return pathCallingFormat;
}
public static CallingFormat getSubdomainCallingFormat() {
return subdomainCallingFormat;
}
public static CallingFormat getVanityCallingFormat() {
return vanityCallingFormat;
}
static private class PathCallingFormat extends CallingFormat {
public boolean supportsLocatedBuckets() {
return false;
}
public String getPathBase(String bucket, String key) {
return isBucketSpecified(bucket) ? "/" + bucket + "/" + key : "/";
}
public String getEndpoint(String server, int port, String bucket) {
return server + ":" + port;
}
public URL getURL(boolean isSecure, String server, int port, String bucket, String key, Map pathArgs)
throws MalformedURLException {
String pathBase = isBucketSpecified(bucket) ? "/" + bucket + "/" + key : "/";
String pathArguments = Utils.convertPathArgsHashToString(pathArgs);
return new URL(isSecure ? "https": "http", server, port, pathBase + pathArguments);
}
private boolean isBucketSpecified(String bucket) {
if(bucket == null) return false;
if(bucket.length() == 0) return false;
return true;
}
}
static private class SubdomainCallingFormat extends CallingFormat {
public boolean supportsLocatedBuckets() {
return true;
}
public String getServer(String server, String bucket) {
return bucket + "." + server;
}
public String getEndpoint(String server, int port, String bucket) {
return getServer(server, bucket) + ":" + port ;
}
public String getPathBase(String bucket, String key) {
return "/" + key;
}
public URL getURL(boolean isSecure, String server, int port, String bucket, String key, Map pathArgs)
throws MalformedURLException {
if (bucket == null || bucket.length() == 0)
{
//The bucket is null, this is listAllBuckets request
String pathArguments = Utils.convertPathArgsHashToString(pathArgs);
return new URL(isSecure ? "https": "http", server, port, "/" + pathArguments);
} else {
String serverToUse = getServer(server, bucket);
String pathBase = getPathBase(bucket, key);
String pathArguments = Utils.convertPathArgsHashToString(pathArgs);
return new URL(isSecure ? "https": "http", serverToUse, port, pathBase + pathArguments);
}
}
}
static private class VanityCallingFormat extends SubdomainCallingFormat {
public String getServer(String server, String bucket) {
return bucket;
}
}
}

com/amazon/s3/CommonPrefixEntry.java (file deleted)

@@ -1,17 +0,0 @@
// This software code is made available "AS IS" without warranties of any
// kind. You may copy, display, modify and redistribute the software
// code either by itself or as incorporated into your code; provided that
// you do not remove any proprietary notices. Your use of this software
// code is at your own risk and you waive any claim against Amazon
// Digital Services, Inc. or its affiliates with respect to your use of
// this software code. (c) 2006 Amazon Digital Services, Inc. or its
// affiliates.
package com.amazon.s3;
public class CommonPrefixEntry {
/**
* The prefix common to the delimited keys it represents
*/
public String prefix;
}

com/amazon/s3/GetResponse.java (file deleted)

@@ -1,70 +0,0 @@
// This software code is made available "AS IS" without warranties of any
// kind. You may copy, display, modify and redistribute the software
// code either by itself or as incorporated into your code; provided that
// you do not remove any proprietary notices. Your use of this software
// code is at your own risk and you waive any claim against Amazon
// Digital Services, Inc. or its affiliates with respect to your use of
// this software code. (c) 2006 Amazon Digital Services, Inc. or its
// affiliates.
package com.amazon.s3;
import java.net.HttpURLConnection;
import java.io.IOException;
import java.util.Iterator;
import java.util.Map;
import java.util.TreeMap;
import java.io.ByteArrayOutputStream;
import java.io.InputStream;
/**
* A Response object returned from AWSAuthConnection.get(). Exposes the attribute object, which
* represents the retrieved object.
*/
public class GetResponse extends Response {
public S3Object object;
/**
* Pulls a representation of an S3Object out of the HttpURLConnection response.
*/
public GetResponse(HttpURLConnection connection) throws IOException {
super(connection);
if (connection.getResponseCode() < 400) {
Map metadata = extractMetadata(connection);
byte[] body = slurpInputStream(connection.getInputStream());
this.object = new S3Object(body, metadata);
}
}
/**
* Examines the response's header fields and returns a Map from String to List of Strings
* representing the object's metadata.
*/
private Map extractMetadata(HttpURLConnection connection) {
TreeMap metadata = new TreeMap();
Map headers = connection.getHeaderFields();
for (Iterator i = headers.keySet().iterator(); i.hasNext(); ) {
String key = (String)i.next();
if (key == null) continue;
if (key.startsWith(Utils.METADATA_PREFIX)) {
metadata.put(key.substring(Utils.METADATA_PREFIX.length()), headers.get(key));
}
}
return metadata;
}
/**
* Read the input stream and dump it all into a big byte array
*/
static byte[] slurpInputStream(InputStream stream) throws IOException {
final int chunkSize = 2048;
byte[] buf = new byte[chunkSize];
ByteArrayOutputStream byteStream = new ByteArrayOutputStream(chunkSize);
int count;
while ((count=stream.read(buf)) != -1) byteStream.write(buf, 0, count);
return byteStream.toByteArray();
}
}

com/amazon/s3/ListAllMyBucketsResponse.java (file deleted)

@@ -1,106 +0,0 @@
// This software code is made available "AS IS" without warranties of any
// kind. You may copy, display, modify and redistribute the software
// code either by itself or as incorporated into your code; provided that
// you do not remove any proprietary notices. Your use of this software
// code is at your own risk and you waive any claim against Amazon
// Digital Services, Inc. or its affiliates with respect to your use of
// this software code. (c) 2006 Amazon Digital Services, Inc. or its
// affiliates.
package com.amazon.s3;
import java.io.IOException;
import java.net.HttpURLConnection;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.List;
import java.util.SimpleTimeZone;
import org.xml.sax.Attributes;
import org.xml.sax.helpers.DefaultHandler;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.XMLReader;
/**
* Returned by AWSAuthConnection.listAllMyBuckets().
*/
public class ListAllMyBucketsResponse extends Response {
/**
* A list of Bucket objects, one for each of this account's buckets. Will be null if
* the request fails.
*/
public List entries;
public ListAllMyBucketsResponse(HttpURLConnection connection) throws IOException {
super(connection);
if (connection.getResponseCode() < 400) {
try {
XMLReader xr = Utils.createXMLReader();;
ListAllMyBucketsHandler handler = new ListAllMyBucketsHandler();
xr.setContentHandler(handler);
xr.setErrorHandler(handler);
xr.parse(new InputSource(connection.getInputStream()));
this.entries = handler.getEntries();
} catch (SAXException e) {
throw new RuntimeException("Unexpected error parsing ListAllMyBuckets xml", e);
}
}
}
static class ListAllMyBucketsHandler extends DefaultHandler {
private List entries = null;
private Bucket currBucket = null;
private StringBuffer currText = null;
private SimpleDateFormat iso8601Parser = null;
public ListAllMyBucketsHandler() {
super();
entries = new ArrayList();
this.iso8601Parser = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");
this.iso8601Parser.setTimeZone(new SimpleTimeZone(0, "GMT"));
this.currText = new StringBuffer();
}
public void startDocument() {
// ignore
}
public void endDocument() {
// ignore
}
public void startElement(String uri, String name, String qName, Attributes attrs) {
if (name.equals("Bucket")) {
this.currBucket = new Bucket();
}
}
public void endElement(String uri, String name, String qName) {
if (name.equals("Bucket")) {
this.entries.add(this.currBucket);
} else if (name.equals("Name")) {
this.currBucket.name = this.currText.toString();
} else if (name.equals("CreationDate")) {
try {
this.currBucket.creationDate = this.iso8601Parser.parse(this.currText.toString());
} catch (ParseException e) {
throw new RuntimeException("Unexpected date format in list bucket output", e);
}
}
this.currText = new StringBuffer();
}
public void characters(char ch[], int start, int length) {
this.currText.append(ch, start, length);
}
public List getEntries() {
return this.entries;
}
}
}

com/amazon/s3/ListBucketResponse.java (file deleted)

@@ -1,243 +0,0 @@
// This software code is made available "AS IS" without warranties of any
// kind. You may copy, display, modify and redistribute the software
// code either by itself or as incorporated into your code; provided that
// you do not remove any proprietary notices. Your use of this software
// code is at your own risk and you waive any claim against Amazon
// Digital Services, Inc. or its affiliates with respect to your use of
// this software code. (c) 2006 Amazon Digital Services, Inc. or its
// affiliates.
package com.amazon.s3;
import java.io.IOException;
import java.net.HttpURLConnection;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.List;
import java.util.SimpleTimeZone;
import org.xml.sax.Attributes;
import org.xml.sax.helpers.DefaultHandler;
import org.xml.sax.InputSource;
import org.xml.sax.XMLReader;
import org.xml.sax.SAXException;
/**
* Returned by AWSAuthConnection.listBucket()
*/
public class ListBucketResponse extends Response {
/**
* The name of the bucket being listed. Null if request fails.
*/
public String name = null;
/**
* The prefix echoed back from the request. Null if request fails.
*/
public String prefix = null;
/**
* The marker echoed back from the request. Null if request fails.
*/
public String marker = null;
/**
* The delimiter echoed back from the request. Null if not specified in
* the request, or if it fails.
*/
public String delimiter = null;
/**
* The maxKeys echoed back from the request if specified. 0 if request fails.
*/
public int maxKeys = 0;
/**
* Indicates if there are more results to the list. True if the current
* list results have been truncated. false if request fails.
*/
public boolean isTruncated = false;
/**
* Indicates what to use as a marker for subsequent list requests in the event
* that the results are truncated. Present only when a delimiter is specified.
* Null if request fails.
*/
public String nextMarker = null;
/**
* A List of ListEntry objects representing the objects in the given bucket.
* Null if the request fails.
*/
public List entries = null;
/**
* A List of CommonPrefixEntry objects representing the common prefixes of the
* keys that matched up to the delimiter. Null if the request fails.
*/
public List commonPrefixEntries = null;
public ListBucketResponse(HttpURLConnection connection) throws IOException {
super(connection);
if (connection.getResponseCode() < 400) {
try {
XMLReader xr = Utils.createXMLReader();
ListBucketHandler handler = new ListBucketHandler();
xr.setContentHandler(handler);
xr.setErrorHandler(handler);
xr.parse(new InputSource(connection.getInputStream()));
this.name = handler.getName();
this.prefix = handler.getPrefix();
this.marker = handler.getMarker();
this.delimiter = handler.getDelimiter();
this.maxKeys = handler.getMaxKeys();
this.isTruncated = handler.getIsTruncated();
this.nextMarker = handler.getNextMarker();
this.entries = handler.getKeyEntries();
this.commonPrefixEntries = handler.getCommonPrefixEntries();
} catch (SAXException e) {
throw new RuntimeException("Unexpected error parsing ListBucket xml", e);
}
}
}
class ListBucketHandler extends DefaultHandler {
private String name = null;
private String prefix = null;
private String marker = null;
private String delimiter = null;
private int maxKeys = 0;
private boolean isTruncated = false;
private String nextMarker = null;
private boolean isEchoedPrefix = false;
private List keyEntries = null;
private ListEntry keyEntry = null;
private List commonPrefixEntries = null;
private CommonPrefixEntry commonPrefixEntry = null;
private StringBuffer currText = null;
private SimpleDateFormat iso8601Parser = null;
public ListBucketHandler() {
super();
keyEntries = new ArrayList();
commonPrefixEntries = new ArrayList();
this.iso8601Parser = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");
this.iso8601Parser.setTimeZone(new SimpleTimeZone(0, "GMT"));
this.currText = new StringBuffer();
}
public void startDocument() {
this.isEchoedPrefix = true;
}
public void endDocument() {
// ignore
}
public void startElement(String uri, String name, String qName, Attributes attrs) {
if (name.equals("Contents")) {
this.keyEntry = new ListEntry();
} else if (name.equals("Owner")) {
this.keyEntry.owner = new Owner();
} else if (name.equals("CommonPrefixes")){
this.commonPrefixEntry = new CommonPrefixEntry();
}
}
public void endElement(String uri, String name, String qName) {
if (name.equals("Name")) {
this.name = this.currText.toString();
}
// this prefix is the one we echo back from the request
else if (name.equals("Prefix") && this.isEchoedPrefix) {
this.prefix = this.currText.toString();
this.isEchoedPrefix = false;
} else if (name.equals("Marker")) {
this.marker = this.currText.toString();
} else if (name.equals("MaxKeys")) {
this.maxKeys = Integer.parseInt(this.currText.toString());
} else if (name.equals("Delimiter")) {
this.delimiter = this.currText.toString();
} else if (name.equals("IsTruncated")) {
this.isTruncated = Boolean.valueOf(this.currText.toString());
} else if (name.equals("NextMarker")) {
this.nextMarker = this.currText.toString();
} else if (name.equals("Contents")) {
this.keyEntries.add(this.keyEntry);
} else if (name.equals("Key")) {
this.keyEntry.key = this.currText.toString();
} else if (name.equals("LastModified")) {
try {
this.keyEntry.lastModified = this.iso8601Parser.parse(this.currText.toString());
} catch (ParseException e) {
throw new RuntimeException("Unexpected date format in list bucket output", e);
}
} else if (name.equals("ETag")) {
this.keyEntry.eTag = this.currText.toString();
} else if (name.equals("Size")) {
this.keyEntry.size = Long.parseLong(this.currText.toString());
} else if (name.equals("StorageClass")) {
this.keyEntry.storageClass = this.currText.toString();
} else if (name.equals("ID")) {
this.keyEntry.owner.id = this.currText.toString();
} else if (name.equals("DisplayName")) {
this.keyEntry.owner.displayName = this.currText.toString();
} else if (name.equals("CommonPrefixes")) {
this.commonPrefixEntries.add(this.commonPrefixEntry);
}
// this is the common prefix for keys that match up to the delimiter
else if (name.equals("Prefix")) {
this.commonPrefixEntry.prefix = this.currText.toString();
}
if(this.currText.length() != 0)
this.currText = new StringBuffer();
}
public void characters(char ch[], int start, int length) {
this.currText.append(ch, start, length);
}
public String getName() {
return this.name;
}
public String getPrefix() {
return this.prefix;
}
public String getMarker() {
return this.marker;
}
public String getDelimiter() {
return this.delimiter;
}
public int getMaxKeys(){
return this.maxKeys;
}
public boolean getIsTruncated() {
return this.isTruncated;
}
public String getNextMarker() {
return this.nextMarker;
}
public List getKeyEntries() {
return this.keyEntries;
}
public List getCommonPrefixEntries() {
return this.commonPrefixEntries;
}
}
}


@ -1,51 +0,0 @@
// This software code is made available "AS IS" without warranties of any
// kind. You may copy, display, modify and redistribute the software
// code either by itself or as incorporated into your code; provided that
// you do not remove any proprietary notices. Your use of this software
// code is at your own risk and you waive any claim against Amazon
// Digital Services, Inc. or its affiliates with respect to your use of
// this software code. (c) 2006 Amazon Digital Services, Inc. or its
// affiliates.
package com.amazon.s3;
import java.util.Date;
/**
* A structure representing a single object stored in S3. Returned as a part of ListBucketResponse.
*/
public class ListEntry {
/**
* The name of the object
*/
public String key;
/**
* The date at which the object was last modified.
*/
public Date lastModified;
/**
* The object's ETag, which can be used for conditional GETs.
*/
public String eTag;
/**
* The size of the object in bytes.
*/
public long size;
/**
* The object's storage class
*/
public String storageClass;
/**
* The object's owner
*/
public Owner owner;
public String toString() {
return key;
}
}


@ -1,89 +0,0 @@
// This software code is made available "AS IS" without warranties of any
// kind. You may copy, display, modify and redistribute the software
// code either by itself or as incorporated into your code; provided that
// you do not remove any proprietary notices. Your use of this software
// code is at your own risk and you waive any claim against Amazon
// Digital Services, Inc. or its affiliates with respect to your use of
// this software code. (c) 2006-2007 Amazon Digital Services, Inc. or its
// affiliates.
package com.amazon.s3;
import java.io.IOException;
import java.net.HttpURLConnection;
import org.xml.sax.Attributes;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.XMLReader;
import org.xml.sax.helpers.DefaultHandler;
/**
* A Response object returned from AWSAuthConnection.getBucketLocation().
* Parses the response XML and exposes the location constraint
* via the geteLocation() method.
*/
public class LocationResponse extends Response {
String location;
/**
* Parse the response to a ?location query.
*/
public LocationResponse(HttpURLConnection connection) throws IOException {
super(connection);
if (connection.getResponseCode() < 400) {
try {
XMLReader xr = Utils.createXMLReader();
LocationResponseHandler handler = new LocationResponseHandler();
xr.setContentHandler(handler);
xr.setErrorHandler(handler);
xr.parse(new InputSource(connection.getInputStream()));
this.location = handler.location;
} catch (SAXException e) {
throw new RuntimeException("Unexpected error parsing LocationConstraint xml", e);
}
} else {
this.location = "<error>";
}
}
/**
* Report the location-constraint for a bucket.
* A value of null indicates an error;
* the empty string indicates no constraint;
* and any other value is an actual location constraint value.
*/
public String getLocation() {
return location;
}
/**
* Helper class to parse LocationConstraint response XML
*/
static class LocationResponseHandler extends DefaultHandler {
String location = null;
private StringBuffer currText = null;
public void startDocument() {
}
public void startElement(String uri, String name, String qName, Attributes attrs) {
if (name.equals("LocationConstraint")) {
this.currText = new StringBuffer();
}
}
public void endElement(String uri, String name, String qName) {
if (name.equals("LocationConstraint")) {
location = this.currText.toString();
this.currText = null;
}
}
public void characters(char ch[], int start, int length) {
if (currText != null)
this.currText.append(ch, start, length);
}
}
}
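The tri-state contract of getLocation() is easy to misread, so here is a minimal hypothetical sketch of how a caller might branch on it; the conn variable stands for an AWSAuthConnection configured elsewhere and is not part of this file.
// Hypothetical caller; 'conn' is an AWSAuthConnection built elsewhere.
LocationResponse resp = conn.getBucketLocation("my-bucket");
String location = resp.getLocation();
if (location == null || "<error>".equals(location)) {
   // the javadoc reports null on error; the constructor above also stores "<error>" on HTTP status >= 400
   System.err.println("could not determine the bucket's location constraint");
} else if (location.length() == 0) {
   System.out.println("no location constraint set for this bucket");
} else {
   System.out.println("location constraint: " + location); // e.g. "EU"
}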


@ -1,18 +0,0 @@
// This software code is made available "AS IS" without warranties of any
// kind. You may copy, display, modify and redistribute the software
// code either by itself or as incorporated into your code; provided that
// you do not remove any proprietary notices. Your use of this software
// code is at your own risk and you waive any claim against Amazon
// Digital Services, Inc. or its affiliates with respect to your use of
// this software code. (c) 2006 Amazon Digital Services, Inc. or its
// affiliates.
package com.amazon.s3;
/**
* A structure representing the owner of an object, used as a part of ListEntry.
*/
public class Owner {
public String id;
public String displayName;
}


@ -1,264 +0,0 @@
// This software code is made available "AS IS" without warranties of any
// kind. You may copy, display, modify and redistribute the software
// code either by itself or as incorporated into your code; provided that
// you do not remove any proprietary notices. Your use of this software
// code is at your own risk and you waive any claim against Amazon
// Digital Services, Inc. or its affiliates with respect to your use of
// this software code. (c) 2006-2007 Amazon Digital Services, Inc. or its
// affiliates.
package com.amazon.s3;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
/**
* This class mimics the behavior of AWSAuthConnection, except instead of actually performing
* the operation, QueryStringAuthGenerator will return URLs with query string parameters that
* can be used to do the same thing. These parameters include an expiration date, so that
* if you hand them off to someone else, they will only work for a limited amount of time.
*/
public class QueryStringAuthGenerator {
private String awsAccessKeyId;
private String awsSecretAccessKey;
private boolean isSecure;
private String server;
private int port;
private CallingFormat callingFormat;
private Long expiresIn = null;
private Long expires = null;
// by default, expire in 1 minute.
private static final Long DEFAULT_EXPIRES_IN = new Long(60 * 1000);
public QueryStringAuthGenerator(String awsAccessKeyId, String awsSecretAccessKey) {
this(awsAccessKeyId, awsSecretAccessKey, true);
}
public QueryStringAuthGenerator(String awsAccessKeyId, String awsSecretAccessKey,
boolean isSecure)
{
this(awsAccessKeyId, awsSecretAccessKey, isSecure, Utils.DEFAULT_HOST);
}
public QueryStringAuthGenerator(String awsAccessKeyId, String awsSecretAccessKey,
boolean isSecure, String server)
{
this(awsAccessKeyId, awsSecretAccessKey, isSecure, server,
isSecure ? Utils.SECURE_PORT : Utils.INSECURE_PORT);
}
public QueryStringAuthGenerator(String awsAccessKeyId, String awsSecretAccessKey,
boolean isSecure, String server, int port)
{
this(awsAccessKeyId, awsSecretAccessKey, isSecure, server,
port, CallingFormat.getSubdomainCallingFormat());
}
public QueryStringAuthGenerator(String awsAccessKeyId, String awsSecretAccessKey,
boolean isSecure, String server, CallingFormat callingFormat)
{
this(awsAccessKeyId, awsSecretAccessKey, isSecure, server,
isSecure ? Utils.SECURE_PORT : Utils.INSECURE_PORT,
callingFormat);
}
public QueryStringAuthGenerator(String awsAccessKeyId, String awsSecretAccessKey,
boolean isSecure, String server, int port, CallingFormat callingFormat)
{
this.awsAccessKeyId = awsAccessKeyId;
this.awsSecretAccessKey = awsSecretAccessKey;
this.isSecure = isSecure;
this.server = server;
this.port = port;
this.callingFormat = callingFormat;
this.expiresIn = DEFAULT_EXPIRES_IN;
this.expires = null;
}
public void setCallingFormat(CallingFormat format) {
this.callingFormat = format;
}
public void setExpires(long millisSinceEpoch) {
expires = new Long(millisSinceEpoch);
expiresIn = null;
}
public void setExpiresIn(long millis) {
expiresIn = new Long(millis);
expires = null;
}
public String createBucket(String bucket, Map headers)
{
// validate bucket name
if (!Utils.validateBucketName(bucket, callingFormat, false))
throw new IllegalArgumentException("Invalid S3Bucket Name: "+bucket);
Map pathArgs = new HashMap();
return generateURL("PUT", bucket, "", pathArgs, headers);
}
public String listBucket(String bucket, String prefix, String marker,
Integer maxKeys, Map headers){
return listBucket(bucket, prefix, marker, maxKeys, null, headers);
}
public String listBucket(String bucket, String prefix, String marker,
Integer maxKeys, String delimiter, Map headers)
{
Map pathArgs = Utils.paramsForListOptions(prefix, marker, maxKeys, delimiter);
return generateURL("GET", bucket, "", pathArgs, headers);
}
public String deleteBucket(String bucket, Map headers)
{
Map pathArgs = new HashMap();
return generateURL("DELETE", bucket, "", pathArgs, headers);
}
public String put(String bucket, String key, S3Object object, Map headers) {
Map metadata = null;
Map pathArgs = new HashMap();
if (object != null) {
metadata = object.metadata;
}
return generateURL("PUT", bucket, Utils.urlencode(key), pathArgs, mergeMeta(headers, metadata));
}
public String get(String bucket, String key, Map headers)
{
Map pathArgs = new HashMap();
return generateURL("GET", bucket, Utils.urlencode(key), pathArgs, headers);
}
public String delete(String bucket, String key, Map headers)
{
Map pathArgs = new HashMap();
return generateURL("DELETE", bucket, Utils.urlencode(key), pathArgs, headers);
}
public String getBucketLogging(String bucket, Map headers) {
Map pathArgs = new HashMap();
pathArgs.put("logging", null);
return generateURL("GET", bucket, "", pathArgs, headers);
}
public String putBucketLogging(String bucket, String loggingXMLDoc, Map headers) {
Map pathArgs = new HashMap();
pathArgs.put("logging", null);
return generateURL("PUT", bucket, "", pathArgs, headers);
}
public String getBucketACL(String bucket, Map headers) {
return getACL(bucket, "", headers);
}
public String getACL(String bucket, String key, Map headers)
{
Map pathArgs = new HashMap();
pathArgs.put("acl", null);
return generateURL("GET", bucket, Utils.urlencode(key), pathArgs, headers);
}
public String putBucketACL(String bucket, String aclXMLDoc, Map headers) {
return putACL(bucket, "", aclXMLDoc, headers);
}
public String putACL(String bucket, String key, String aclXMLDoc, Map headers)
{
Map pathArgs = new HashMap();
pathArgs.put("acl", null);
return generateURL("PUT", bucket, Utils.urlencode(key), pathArgs, headers);
}
public String listAllMyBuckets(Map headers)
{
Map pathArgs = new HashMap();
return generateURL("GET", "", "", pathArgs, headers);
}
public String makeBareURL(String bucket, String key) {
StringBuffer buffer = new StringBuffer();
if (this.isSecure) {
buffer.append("https://");
} else {
buffer.append("http://");
}
buffer.append(this.server).append(":").append(this.port).append("/").append(bucket);
buffer.append("/").append(Utils.urlencode(key));
return buffer.toString();
}
private String generateURL(String method, String bucketName, String key, Map pathArgs, Map headers)
{
long expires = 0L;
if (this.expiresIn != null) {
expires = System.currentTimeMillis() + this.expiresIn.longValue();
} else if (this.expires != null) {
expires = this.expires.longValue();
} else {
throw new RuntimeException("Illegal expires state");
}
// convert to seconds
expires /= 1000;
String canonicalString = Utils.makeCanonicalString(method, bucketName, key, pathArgs, headers, ""+expires);
String encodedCanonical = Utils.encode(this.awsSecretAccessKey, canonicalString, true);
pathArgs.put("Signature", encodedCanonical);
pathArgs.put("Expires", Long.toString(expires));
pathArgs.put("AWSAccessKeyId", this.awsAccessKeyId);
CallingFormat callingFormat = Utils.getCallingFormatForBucket( this.callingFormat, bucketName );
if ( isSecure && callingFormat != CallingFormat.getPathCallingFormat() && bucketName.indexOf( "." ) != -1 ) {
System.err.println( "You are making an SSL connection, however, the bucket contains periods and the wildcard certificate will not match by default. Please consider using HTTP." );
}
String returnString;
try {
URL url = callingFormat.getURL(isSecure, server, port, bucketName, key, pathArgs);
returnString = url.toString();
} catch (MalformedURLException e) {
returnString = "Exception generating url " + e;
}
return returnString;
}
private Map mergeMeta(Map headers, Map metadata) {
Map merged = new TreeMap();
if (headers != null) {
for (Iterator i = headers.keySet().iterator(); i.hasNext(); ) {
String key = (String)i.next();
merged.put(key, headers.get(key));
}
}
if (metadata != null) {
for (Iterator i = metadata.keySet().iterator(); i.hasNext(); ) {
String key = (String)i.next();
String metadataKey = Utils.METADATA_PREFIX + key;
if (merged.containsKey(metadataKey)) {
((List)merged.get(metadataKey)).addAll((List)metadata.get(key));
} else {
merged.put(metadataKey, metadata.get(key));
}
}
}
return merged;
}
}
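To make the expiry handling above concrete, here is a minimal hypothetical sketch of generating a time-limited GET URL with this class; the credentials, bucket and key are placeholders.
// Hypothetical usage; credentials and names are placeholders.
QueryStringAuthGenerator gen =
      new QueryStringAuthGenerator("ACCESS_KEY_ID", "SECRET_ACCESS_KEY", false);
gen.setExpiresIn(60 * 60 * 1000L); // the signed URL stays valid for one hour
// returns a URL carrying Signature, Expires and AWSAccessKeyId query parameters
String url = gen.get("my-bucket", "my-key.txt", null);
System.out.println(url);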


@ -1,25 +0,0 @@
// This software code is made available "AS IS" without warranties of any
// kind. You may copy, display, modify and redistribute the software
// code either by itself or as incorporated into your code; provided that
// you do not remove any proprietary notices. Your use of this software
// code is at your own risk and you waive any claim against Amazon
// Digital Services, Inc. or its affiliates with respect to your use of
// this software code. (c) 2006 Amazon Digital Services, Inc. or its
// affiliates.
package com.amazon.s3;
import java.net.HttpURLConnection;
import java.io.IOException;
/**
* The parent class of all other Responses. This class keeps track of the
* HttpURLConnection response.
*/
public class Response {
public HttpURLConnection connection;
public Response(HttpURLConnection connection) throws IOException {
this.connection = connection;
}
}


@ -1,30 +0,0 @@
// This software code is made available "AS IS" without warranties of any
// kind. You may copy, display, modify and redistribute the software
// code either by itself or as incorporated into your code; provided that
// you do not remove any proprietary notices. Your use of this software
// code is at your own risk and you waive any claim against Amazon
// Digital Services, Inc. or its affiliates with respect to your use of
// this software code. (c) 2006 Amazon Digital Services, Inc. or its
// affiliates.
package com.amazon.s3;
import java.util.Map;
/**
* A representation of a single object stored in S3.
*/
public class S3Object {
public byte[] data;
/**
* A Map from String to List of Strings representing the object's metadata
*/
public Map metadata;
public S3Object(byte[] data, Map metadata) {
this.data = data;
this.metadata = metadata;
}
}


@ -1,324 +0,0 @@
// This software code is made available "AS IS" without warranties of any
// kind. You may copy, display, modify and redistribute the software
// code either by itself or as incorporated into your code; provided that
// you do not remove any proprietary notices. Your use of this software
// code is at your own risk and you waive any claim against Amazon
// Digital Services, Inc. or its affiliates with respect to your use of
// this software code. (c) 2006-2007 Amazon Digital Services, Inc. or its
// affiliates.
package com.amazon.s3;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.security.InvalidKeyException;
import java.security.NoSuchAlgorithmException;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.SortedMap;
import java.util.TreeMap;
import javax.crypto.Mac;
import javax.crypto.spec.SecretKeySpec;
import org.xml.sax.XMLReader;
import org.xml.sax.helpers.XMLReaderFactory;
import org.xml.sax.SAXException;
import com.amazon.thirdparty.Base64;
public class Utils {
static final String METADATA_PREFIX = "x-amz-meta-";
static final String AMAZON_HEADER_PREFIX = "x-amz-";
static final String ALTERNATIVE_DATE_HEADER = "x-amz-date";
public static final String DEFAULT_HOST = "s3.amazonaws.com";
public static final int SECURE_PORT = 443;
public static final int INSECURE_PORT = 80;
/**
* HMAC/SHA1 Algorithm per RFC 2104.
*/
private static final String HMAC_SHA1_ALGORITHM = "HmacSHA1";
static String makeCanonicalString(String method, String bucket, String key, Map pathArgs, Map headers) {
return makeCanonicalString(method, bucket, key, pathArgs, headers, null);
}
/**
* Calculate the canonical string. When expires is non-null, it will be
* used instead of the Date header.
*/
static String makeCanonicalString(String method, String bucketName, String key, Map pathArgs,
Map headers, String expires)
{
StringBuffer buf = new StringBuffer();
buf.append(method + "\n");
// Add all interesting headers to a list, then sort them. "Interesting"
// is defined as Content-MD5, Content-Type, Date, and x-amz-
SortedMap interestingHeaders = new TreeMap();
if (headers != null) {
for (Iterator i = headers.keySet().iterator(); i.hasNext(); ) {
String hashKey = (String)i.next();
if (hashKey == null) continue;
String lk = hashKey.toLowerCase();
// Ignore any headers that are not particularly interesting.
if (lk.equals("content-type") || lk.equals("content-md5") || lk.equals("date") ||
lk.startsWith(AMAZON_HEADER_PREFIX))
{
List s = (List)headers.get(hashKey);
interestingHeaders.put(lk, concatenateList(s));
}
}
}
if (interestingHeaders.containsKey(ALTERNATIVE_DATE_HEADER)) {
interestingHeaders.put("date", "");
}
// if the expires is non-null, use that for the date field. this
// trumps the x-amz-date behavior.
if (expires != null) {
interestingHeaders.put("date", expires);
}
// these headers require that we still put a new line in after them,
// even if they don't exist.
if (! interestingHeaders.containsKey("content-type")) {
interestingHeaders.put("content-type", "");
}
if (! interestingHeaders.containsKey("content-eTag")) {
interestingHeaders.put("content-eTag", "");
}
// Finally, add all the interesting headers (i.e.: all that start with x-amz- ;-))
for (Iterator i = interestingHeaders.keySet().iterator(); i.hasNext(); ) {
String headerKey = (String)i.next();
if (headerKey.startsWith(AMAZON_HEADER_PREFIX)) {
buf.append(headerKey).append(':').append(interestingHeaders.get(headerKey));
} else {
buf.append(interestingHeaders.get(headerKey));
}
buf.append("\n");
}
// build the path using the bucket and key
if (bucketName != null && !bucketName.equals("")) {
buf.append("/" + bucketName );
}
// append the key (it might be an empty string)
// append a slash regardless
buf.append("/");
if(key != null) {
buf.append(key);
}
// if there is an acl, logging or torrent parameter
// add them to the string
if (pathArgs != null ) {
if (pathArgs.containsKey("acl")) {
buf.append("?acl");
} else if (pathArgs.containsKey("torrent")) {
buf.append("?torrent");
} else if (pathArgs.containsKey("logging")) {
buf.append("?logging");
} else if (pathArgs.containsKey("location")) {
buf.append("?location");
}
}
return buf.toString();
}
/**
* Calculate the HMAC/SHA1 on a string.
* @param data Data to sign
* @param passcode Passcode to sign it with
* @return Signature
* @throws NoSuchAlgorithmException If the algorithm does not exist. Unlikely
* @throws InvalidKeyException If the key is invalid.
*/
static String encode(String awsSecretAccessKey, String canonicalString,
boolean urlencode)
{
// The following HMAC/SHA1 code for the signature is taken from the
// AWS Platform's implementation of RFC2104 (amazon.webservices.common.Signature)
//
// Acquire an HMAC/SHA1 from the raw key bytes.
SecretKeySpec signingKey =
new SecretKeySpec(awsSecretAccessKey.getBytes(), HMAC_SHA1_ALGORITHM);
// Acquire the MAC instance and initialize with the signing key.
Mac mac = null;
try {
mac = Mac.getInstance(HMAC_SHA1_ALGORITHM);
} catch (NoSuchAlgorithmException e) {
// should not happen
throw new RuntimeException("Could not find sha1 algorithm", e);
}
try {
mac.init(signingKey);
} catch (InvalidKeyException e) {
// also should not happen
throw new RuntimeException("Could not initialize the MAC algorithm", e);
}
// Compute the HMAC on the digest, and set it.
String b64 = Base64.encodeBytes(mac.doFinal(canonicalString.getBytes()));
if (urlencode) {
return urlencode(b64);
} else {
return b64;
}
}
static Map paramsForListOptions(String prefix, String marker, Integer maxKeys) {
return paramsForListOptions(prefix, marker, maxKeys, null);
}
static Map paramsForListOptions(String prefix, String marker, Integer maxKeys, String delimiter) {
Map argParams = new HashMap();
// these three params must be url encoded
if (prefix != null)
argParams.put("prefix", urlencode(prefix));
if (marker != null)
argParams.put("marker", urlencode(marker));
if (delimiter != null)
argParams.put("delimiter", urlencode(delimiter));
if (maxKeys != null)
argParams.put("max-keys", Integer.toString(maxKeys.intValue()));
return argParams;
}
/**
* Converts the Path Arguments from a map to String which can be used in url construction
* @param pathArgs a map of arguments
* @return a string representation of pathArgs
*/
public static String convertPathArgsHashToString(Map pathArgs) {
StringBuffer pathArgsString = new StringBuffer();
String argumentValue;
boolean firstRun = true;
if (pathArgs != null) {
for (Iterator argumentIterator = pathArgs.keySet().iterator(); argumentIterator.hasNext(); ) {
String argument = (String)argumentIterator.next();
if (firstRun) {
firstRun = false;
pathArgsString.append("?");
} else {
pathArgsString.append("&");
}
argumentValue = (String)pathArgs.get(argument);
pathArgsString.append(argument);
if (argumentValue != null) {
pathArgsString.append("=");
pathArgsString.append(argumentValue);
}
}
}
return pathArgsString.toString();
}
static String urlencode(String unencoded) {
try {
return URLEncoder.encode(unencoded, "UTF-8");
} catch (UnsupportedEncodingException e) {
// should never happen
throw new RuntimeException("Could not url encode to UTF-8", e);
}
}
static XMLReader createXMLReader() {
try {
return XMLReaderFactory.createXMLReader();
} catch (SAXException e) {
// oops, lets try doing this (needed in 1.4)
System.setProperty("org.xml.sax.driver", "org.apache.crimson.parser.XMLReaderImpl");
}
try {
// try once more
return XMLReaderFactory.createXMLReader();
} catch (SAXException e) {
throw new RuntimeException("Couldn't initialize a sax driver for the XMLReader");
}
}
/**
* Concatenates a bunch of header values, separating them with a comma.
* @param values List of header values.
* @return String of all headers, with commas.
*/
private static String concatenateList(List values) {
StringBuffer buf = new StringBuffer();
for (int i = 0, size = values.size(); i < size; ++ i) {
buf.append(((String)values.get(i)).replaceAll("\n", "").trim());
if (i != (size - 1)) {
buf.append(",");
}
}
return buf.toString();
}
/**
* Validate bucket-name
*/
static boolean validateBucketName(String bucketName, CallingFormat callingFormat, boolean located) {
if (callingFormat == CallingFormat.getPathCallingFormat())
{
final int MIN_BUCKET_LENGTH = 3;
final int MAX_BUCKET_LENGTH = 255;
final String BUCKET_NAME_REGEX = "^[0-9A-Za-z\\.\\-_]*$";
return null != bucketName &&
bucketName.length() >= MIN_BUCKET_LENGTH &&
bucketName.length() <= MAX_BUCKET_LENGTH &&
bucketName.matches(BUCKET_NAME_REGEX);
} else {
return isValidSubdomainBucketName( bucketName );
}
}
static boolean isValidSubdomainBucketName( String bucketName ) {
final int MIN_BUCKET_LENGTH = 3;
final int MAX_BUCKET_LENGTH = 63;
// don't allow names that look like 127.0.0.1
final String IPv4_REGEX = "^[0-9]+\\.[0-9]+\\.[0-9]+\\.[0-9]+$";
// dns sub-name restrictions
final String BUCKET_NAME_REGEX = "^[a-z0-9]([a-z0-9\\-]*[a-z0-9])?(\\.[a-z0-9]([a-z0-9\\-]*[a-z0-9])?)*$";
// If there wasn't a location-constraint, the current actual restriction
// is just that no 'part' of the name (i.e. any sequence of characters
// between two '.'s) may be longer than 63; the recommendation, though,
// is to keep the entire bucket name under 63.
return null != bucketName &&
bucketName.length() >= MIN_BUCKET_LENGTH &&
bucketName.length() <= MAX_BUCKET_LENGTH &&
!bucketName.matches(IPv4_REGEX) &&
bucketName.matches(BUCKET_NAME_REGEX);
}
static CallingFormat getCallingFormatForBucket( CallingFormat desiredFormat, String bucketName ) {
CallingFormat callingFormat = desiredFormat;
if ( callingFormat == CallingFormat.getSubdomainCallingFormat() && ! Utils.isValidSubdomainBucketName( bucketName ) ) {
callingFormat = CallingFormat.getPathCallingFormat();
}
return callingFormat;
}
}
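The two helpers above implement the legacy S3 request-signing scheme: build a canonical string from the HTTP method, a few interesting headers and the resource path, then sign it with HMAC-SHA1 over the secret key. A minimal hypothetical sketch follows; the class name and all values are placeholders, and it sits in the com.amazon.s3 package only because the helpers are package-private.
package com.amazon.s3;
import java.util.Arrays;
import java.util.Map;
import java.util.TreeMap;
// Hypothetical sketch of signing a GET of /my-bucket/my-key.txt the way AWSAuthConnection does.
public class SigningSketch {
   public static void main(String[] args) {
      Map headers = new TreeMap();
      headers.put("Date", Arrays.asList(new String[] { "Thu, 17 Nov 2005 18:49:58 GMT" }));
      // roughly "GET\n\n\nThu, 17 Nov 2005 18:49:58 GMT\n/my-bucket/my-key.txt"
      String canonical = Utils.makeCanonicalString("GET", "my-bucket", "my-key.txt", null, headers);
      // base64(HMAC-SHA1(secret, canonical)); a request would send it as
      // "Authorization: AWS ACCESS_KEY_ID:" + signature
      String signature = Utils.encode("SECRET_ACCESS_KEY", canonical, false);
      System.out.println(signature);
   }
}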

File diff suppressed because it is too large


@ -1,84 +0,0 @@
This is one of a collection of interface libraries that can be used to interact
with the Amazon S3 system in a number of different languages. They each expose
two main interface classes, AWSAuthConnection and QueryStringAuthGenerator.
The first actually performs all the operations using the appropriate libraries
for the language, including header signing. The second,
QueryStringAuthGenerator, has the same interface, but instead of performing
the operation, this class will return urls with the right query string
authentication parameters set.
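As a rough, hypothetical illustration of that difference in the Java binding (placeholder credentials, bucket and key; exact signatures vary slightly between the language bindings):
AWSAuthConnection conn = new AWSAuthConnection("ACCESS_KEY_ID", "SECRET_ACCESS_KEY");
// performs the GET itself, signing the request headers
GetResponse direct = conn.get("my-bucket", "my-key.txt", null);
QueryStringAuthGenerator gen = new QueryStringAuthGenerator("ACCESS_KEY_ID", "SECRET_ACCESS_KEY");
// performs nothing; returns a signed, expiring URL that anyone holding it can use until it expires
String url = gen.get("my-bucket", "my-key.txt", null);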
Basic Operations:
object requests:
GetResponse get(bucketName, keyName) - retrieves an object
GetResponse getACL(bucketName, keyName) - returns the xml acl doc
Response put(bucketName, keyName, object) - writes an object
Response putACL(bucketName, keyName, aclXMLDoc) - sets the xml acl doc
Response delete(bucketName, keyName) - deletes an object
bucket requests:
Response createBucket(bucketName, location) - creates a bucket
ListResponse listBucket(bucketName) - lists a bucket's contents
LocationResponse getBucketLocation(bucketName) - return the location-constraint of this bucket
GetResponse getBucketACL(bucketName) - returns the xml representation of this bucket's access control list
Response putBucketAcl(bucketName, aclXMLDoc) - sets xml representation of the bucket acl
Response deleteBucket(bucketName) - delete an empty bucket
GetResponse getBucketLogging(bucketName) - returns the xml representation of this bucket's access logging configuration
Response putBucketLogging(bucketName, loggingXMLDoc) - sets the xml representation of the bucket logging configuration
ListAllMyBucketsResponse listAllMyBuckets() - returns a list of all buckets owned by this AWS Access Key Id
Dependencies:
None, beyond the standard libraries.
Notes:
Please note that this uses the public domain iHarder.net Base64 library. For updates to that library,
see http://iharder.sourceforge.net/current/java/base64/ .
If your bucket name contains periods, you will need to use a non-HTTPS connection as the SSL certificate
presented by s3.amazonaws.com will not match if you do.
Limitations:
One of the main limitations of these sample AWSAuthConnection implementations
is that the interfaces are not streaming. This means that you have to pass the
data in as a string or as a byte array and the operation returns a string or a
byte array back. This is conceptually simpler, and fine for smaller objects,
but large objects, say a couple megabytes or greater, will show poor
performance, since everything is being passed around in memory. More
sophisticated libraries would pass streams in and out, and would only read the
data on-demand, rather than storing it all in memory (S3 itself would have no
problem with such streaming applications). Another upshot of this is that the
interfaces are all blocking---that is, you cannot look at the data until all of
it has downloaded. Again, this is fine for smaller objects, but unacceptable
for larger ones.
These libraries have nearly non-existent error handling. All errors from lower
libraries are simply passed up. The response code in the connection object needs
to be checked after each request to verify whether the request succeeded.
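In the Java sample that means inspecting the wrapped HttpURLConnection yourself, for example (hypothetical sketch; conn is an AWSAuthConnection set up elsewhere):
Response r = conn.delete("my-bucket", "my-key.txt", null);
int code = r.connection.getResponseCode(); // Response only wraps the HttpURLConnection
if (code >= 300) {
   System.err.println("S3 request failed: " + code + " " + r.connection.getResponseMessage());
}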
Only the java library has proper handling for repeated headers. The others
assume that each header will have only one value.
It is our intention that these libraries act as a starting point for future
development. They are meant to show off the various operations and provide an
example of how to negotiate the authentication process.
This software code is made available "AS IS" without warranties of any
kind. You may copy, display, modify and redistribute the software
code either by itself or as incorporated into your code; provided that
you do not remove any proprietary notices. Your use of this software
code is at your own risk and you waive any claim against Amazon
Digital Services, Inc. or its affiliates with respect to your use of
this software code. (c) 2006 Amazon Digital Services, Inc. or its
affiliates.


@ -1,52 +0,0 @@
/**
*
* Copyright (C) 2009 Cloud Conscious, LLC. <info@cloudconscious.com>
*
* ====================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ====================================================================
*/
package com.amazon.s3;
import java.util.ArrayList;
import java.util.List;
import org.testng.annotations.Test;
/**
* Compares performance of date operations
*
* @author Adrian Cole
* @author James Murty
*/
@Test(sequential = true, timeOut = 2 * 60 * 1000, testName = "s3.DateTest")
public class DateServiceTest extends org.jclouds.date.DateServiceTest {
@Test
void testAmazonParseDateSerialResponseTime() {
for (int i = 0; i < LOOP_COUNT; i++)
AWSAuthConnection.httpDate();
}
@Test
void testFormatAmazonDatePerformanceInParallel() throws Throwable {
List<Runnable> tasks = new ArrayList<Runnable>(testData.length);
tasks.add(new Runnable() {
public void run() {
AWSAuthConnection.httpDate();
}
});
executeMultiThreadedPerformanceTest("testFormatAmazonDatePerformanceInParallel", tasks);
}
}


@ -1,162 +0,0 @@
/**
*
* Copyright (C) 2009 Cloud Conscious, LLC. <info@cloudconscious.com>
*
* ====================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ====================================================================
*/
package com.amazon.s3;
import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.util.Date;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.CompletionService;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorCompletionService;
import org.jclouds.date.internal.SimpleDateFormatDateService;
import org.testng.annotations.Test;
/**
* Compares performance of xml parsing apis.
*
* @author Adrian Cole
*/
@Test(sequential = true, timeOut = 2 * 60 * 1000, testName = "s3.S3ParserTest")
public class S3ParserTest extends org.jclouds.aws.s3.xml.S3ParserTest {
class MockHttpURLClient extends HttpURLConnection {
private String content;
@Override
public InputStream getInputStream() throws IOException {
return org.jclouds.util.Utils.toInputStream(content);
}
protected MockHttpURLClient(String content) {
super(null);
this.content = content;
}
public void disconnect() {
}
public boolean usingProxy() {
return false;
}
@Override
public int getResponseCode() throws IOException {
return 200;
}
public void connect() throws IOException {
}
}
@Test
void testAmazonParseListAllMyBucketsSerialResponseTime() throws IOException {
for (int i = 0; i < LOOP_COUNT; i++)
runAmazonParseListAllMyBuckets();
}
@Test
void testAmazonParseListAllMyBucketsParallelResponseTime() throws InterruptedException,
ExecutionException {
CompletionService<Boolean> completer = new ExecutorCompletionService<Boolean>(exec);
for (int i = 0; i < LOOP_COUNT; i++)
completer.submit(new Callable<Boolean>() {
public Boolean call() throws IOException {
runAmazonParseListAllMyBuckets();
return true;
}
});
for (int i = 0; i < LOOP_COUNT; i++)
assert completer.take().get();
}
@SuppressWarnings("unchecked")
@Test(enabled = false)
public void testAmazonCanParseListAllMyBuckets() throws IOException {
ListAllMyBucketsResponse response = runAmazonParseListAllMyBuckets();
List<Bucket> buckets = response.entries;
Bucket bucket1 = (Bucket) buckets.get(0);
assert bucket1.name.equals("adrianjbosstest");
Date expectedDate1 = new SimpleDateFormatDateService()
.iso8601DateParse("2009-03-12T02:00:07.000Z");
Date date1 = bucket1.creationDate;
assert date1.toString().equals(expectedDate1.toString());
Bucket bucket2 = (Bucket) buckets.get(1);
assert bucket2.name.equals("adrianjbosstest2");
Date expectedDate2 = new SimpleDateFormatDateService()
.iso8601DateParse("2009-03-12T02:00:09.000Z");
Date date2 = bucket2.creationDate;
assert date2.toString().equals(expectedDate2.toString());
assert buckets.size() == 2;
}
private ListAllMyBucketsResponse runAmazonParseListAllMyBuckets() throws IOException {
ListAllMyBucketsResponse response = new ListAllMyBucketsResponse(new MockHttpURLClient(
listAllMyBucketsResultOn200));
return response;
}
@Test(enabled = false)
public void testAmazonCanParseListBucketResult() throws IOException {
ListBucketResponse response = runAmazonParseListBucketResult();
ListEntry content = (ListEntry) response.entries.get(0);
assert content.key.equals("3366");
assert content.lastModified.equals(new SimpleDateFormatDateService()
.iso8601DateParse("2009-03-12T02:00:13.000Z"));
assert content.eTag.equals("\"9d7bb64e8e18ee34eec06dd2cf37b766\"");
assert content.size == 136;
assert content.owner.id
.equals("e1a5f66a480ca99a4fdfe8e318c3020446c9989d7004e7778029fbcc5d990fa0");
assert content.owner.displayName.equals("ferncam");
assert content.storageClass.equals("STANDARD");
}
private ListBucketResponse runAmazonParseListBucketResult() throws IOException {
ListBucketResponse response = new ListBucketResponse(new MockHttpURLClient(
listAllMyBucketsResultOn200));
return response;
}
@Test(enabled = false)
void testAmazonParseListBucketResultSerialResponseTime() throws IOException {
for (int i = 0; i < LOOP_COUNT; i++)
runAmazonParseListBucketResult();
}
@Test(enabled = false)
void testAmazonParseListBucketResultParallelResponseTime() throws InterruptedException,
ExecutionException {
CompletionService<Boolean> completer = new ExecutorCompletionService<Boolean>(exec);
for (int i = 0; i < LOOP_COUNT; i++)
completer.submit(new Callable<Boolean>() {
public Boolean call() throws IOException {
runAmazonParseListBucketResult();
return true;
}
});
for (int i = 0; i < LOOP_COUNT; i++)
assert completer.take().get();
}
}


@ -18,14 +18,12 @@
  */
 package org.jclouds.aws.s3;
+import java.io.ByteArrayInputStream;
 import java.io.File;
 import java.io.InputStream;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Map;
-import java.util.TreeMap;
 import java.util.concurrent.Callable;
 import java.util.concurrent.ExecutionException;
+import java.util.concurrent.Executors;
 import java.util.concurrent.Future;
 import org.jets3t.service.S3ServiceException;
@ -33,7 +31,11 @@ import org.testng.ITestContext;
 import org.testng.annotations.BeforeClass;
 import org.testng.annotations.Test;
-import com.amazon.s3.AWSAuthConnection;
+import com.amazonaws.auth.BasicAWSCredentials;
+import com.amazonaws.services.s3.AmazonS3;
+import com.amazonaws.services.s3.AmazonS3Client;
+import com.amazonaws.services.s3.model.ObjectMetadata;
+import com.amazonaws.services.s3.model.PutObjectRequest;
 /**
  * Runs operations that amazon s3 sample code is capable of performing.
@ -42,42 +44,19 @@ import com.amazon.s3.AWSAuthConnection;
  */
 @Test(sequential = true, timeOut = 2 * 60 * 1000, testName = "perftest.AmazonPerformanceLiveTest", groups = { "live" })
 public class AmazonPerformanceLiveTest extends BasePerformanceLiveTest {
-   private AWSAuthConnection amzClient;
+   private AmazonS3 s3;
    @BeforeClass(groups = { "live" }, dependsOnMethods = "setUpResourcesOnThisThread")
    protected void createLiveS3Context(ITestContext testContext) throws S3ServiceException {
-      if (testContext.getAttribute("jclouds.test.user") != null) {
-         amzClient = new AWSAuthConnection((String) testContext.getAttribute("jclouds.test.user"),
-               (String) testContext.getAttribute("jclouds.test.key"), false);
+      exec = Executors.newCachedThreadPool();
+      if (testContext.getAttribute("jclouds.test.identity") != null) {
+         s3 = new AmazonS3Client(new BasicAWSCredentials((String) testContext.getAttribute("jclouds.test.identity"),
+               (String) testContext.getAttribute("jclouds.test.credential")));
       } else {
          throw new RuntimeException("not configured properly");
       }
    }
-   @Override
-   @Test(enabled = false)
-   public void testPutFileSerial() throws Exception {
-      throw new UnsupportedOperationException();
-   }
-   @Override
-   @Test(enabled = false)
-   public void testPutFileParallel() throws InterruptedException, ExecutionException {
-      throw new UnsupportedOperationException();
-   }
-   @Override
-   @Test(enabled = false)
-   public void testPutInputStreamSerial() throws Exception {
-      throw new UnsupportedOperationException();
-   }
-   @Override
-   @Test(enabled = false)
-   public void testPutInputStreamParallel() throws InterruptedException, ExecutionException {
-      throw new UnsupportedOperationException();
-   }
    @Override
    @Test(enabled = false)
    public void testPutStringSerial() throws Exception {
@ -92,28 +71,45 @@ public class AmazonPerformanceLiveTest extends BasePerformanceLiveTest {
    @SuppressWarnings("unchecked")
    @Override
-   protected Future<?> putByteArray(final String bucket, final String key, byte[] data,
-         String contentType) {
-      final com.amazon.s3.S3Object object = new com.amazon.s3.S3Object(data, null);
-      final Map<String, List<String>> headers = new TreeMap<String, List<String>>();
-      headers.put("Content-Type", Arrays.asList(new String[] { contentType }));
+   protected Future<?> putByteArray(final String bucket, final String key, final byte[] data, final String contentType) {
       return exec.submit(new Callable() {
          @Override
          public Object call() throws Exception {
-            return amzClient.put(bucket, key, object, headers).connection.getResponseMessage();
+            ObjectMetadata md = new ObjectMetadata();
+            md.setContentType(contentType);
+            md.setContentLength(data.length);
+            return s3.putObject(new PutObjectRequest(bucket, key, new ByteArrayInputStream(data), md)).getETag();
          }
       });
    }
+   @SuppressWarnings("unchecked")
    @Override
-   protected Future<?> putFile(String bucket, String key, File data, String contentType) {
-      throw new UnsupportedOperationException();
+   protected Future<?> putFile(final String bucket, final String key, final File data, String contentType) {
+      return exec.submit(new Callable() {
+         @Override
+         public Object call() throws Exception {
+            return s3.putObject(new PutObjectRequest(bucket, key, data)).getETag();
+         }
+      });
    }
+   @SuppressWarnings("unchecked")
    @Override
-   protected Future<?> putInputStream(String bucket, String key, InputStream data,
-         String contentType) {
-      throw new UnsupportedOperationException();
+   protected Future<?> putInputStream(final String bucket, final String key, final InputStream data,
+         final String contentType) {
+      return exec.submit(new Callable() {
+         @Override
+         public Object call() throws Exception {
+            ObjectMetadata md = new ObjectMetadata();
+            md.setContentType(contentType);
+            md.setContentLength(data.available());
+            return s3.putObject(new PutObjectRequest(bucket, key, data, md)).getETag();
+         }
+      });
    }
    @Override
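For readers unfamiliar with the SDK calls the updated test now relies on, here is a minimal, self-contained sketch of the same put pattern outside the test harness; credentials, bucket and key are placeholders and the class itself is hypothetical (AWS SDK for Java 1.x).
import java.io.ByteArrayInputStream;
import com.amazonaws.auth.BasicAWSCredentials;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3Client;
import com.amazonaws.services.s3.model.ObjectMetadata;
import com.amazonaws.services.s3.model.PutObjectRequest;
public class AmazonSdkPutSketch {
   public static void main(String[] args) throws Exception {
      AmazonS3 s3 = new AmazonS3Client(new BasicAWSCredentials("ACCESS_KEY_ID", "SECRET_KEY"));
      byte[] data = "hello".getBytes("UTF-8");
      ObjectMetadata md = new ObjectMetadata();
      md.setContentType("text/plain");
      md.setContentLength(data.length);
      // putObject returns the stored object's ETag; the test treats a non-null result as success
      String eTag = s3.putObject(new PutObjectRequest("my-bucket", "my-key",
            new ByteArrayInputStream(data), md)).getETag();
      System.out.println(eTag);
   }
}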


@ -18,7 +18,8 @@
  */
 package org.jclouds.aws.s3;
-import static org.jclouds.Constants.*;
+import static org.jclouds.Constants.PROPERTY_IO_WORKER_THREADS;
+import static org.jclouds.Constants.PROPERTY_MAX_CONNECTIONS_PER_CONTEXT;
 import static org.jclouds.Constants.PROPERTY_MAX_CONNECTIONS_PER_HOST;
 import static org.jclouds.Constants.PROPERTY_USER_THREADS;
@ -30,7 +31,7 @@ import java.util.concurrent.Future;
 import org.jclouds.aws.s3.domain.S3Object;
-import com.google.appengine.repackaged.com.google.common.base.Throwables;
+import com.google.common.base.Throwables;
 /**
  * // TODO: Adrian: Document this!
@ -57,12 +58,10 @@ public abstract class BaseJCloudsPerformanceLiveTest extends BasePerformanceLive
    // }
    protected void overrideWithSysPropertiesAndPrint(Properties overrides, String contextName) {
       overrides.putAll(System.getProperties());
-      System.out.printf(
-            "%s: loopCount(%s), perContext(%s), perHost(%s),ioWorkers(%s), userThreads(%s)%n",
-            contextName, loopCount, overrides.getProperty(PROPERTY_MAX_CONNECTIONS_PER_CONTEXT),
-            overrides.getProperty(PROPERTY_MAX_CONNECTIONS_PER_HOST), overrides
-                  .getProperty(PROPERTY_IO_WORKER_THREADS), overrides
-                  .getProperty(PROPERTY_USER_THREADS));
+      System.out.printf("%s: loopCount(%s), perContext(%s), perHost(%s),ioWorkers(%s), userThreads(%s)%n", contextName,
+            loopCount, overrides.getProperty(PROPERTY_MAX_CONNECTIONS_PER_CONTEXT), overrides
+                  .getProperty(PROPERTY_MAX_CONNECTIONS_PER_HOST), overrides.getProperty(PROPERTY_IO_WORKER_THREADS),
+            overrides.getProperty(PROPERTY_USER_THREADS));
    }
    @Override
@ -90,16 +89,15 @@ public abstract class BaseJCloudsPerformanceLiveTest extends BasePerformanceLive
    }
    @Override
-   protected Future<?> putInputStream(String bucket, String key, InputStream data,
-         String contentType) {
+   protected Future<?> putInputStream(String bucket, String key, InputStream data, String contentType) {
       S3Object object = newObject(key);
-      object.getMetadata().setContentType(contentType);
       object.setPayload(data);
       try {
-         object.setContentLength(new Long(data.available()));
+         object.getPayload().setContentLength(new Long(data.available()));
       } catch (IOException e) {
          Throwables.propagate(e);
       }
+      object.getPayload().setContentType(contentType);
       return getApi().putObject(bucket, object);
    }


@ -50,7 +50,7 @@ public abstract class BasePerformanceLiveTest extends BaseBlobStoreIntegrationTe
       containerCount = 1;
    }
    protected int timeoutSeconds = 15;
-   protected int loopCount = 1000;
+   protected int loopCount = Integer.parseInt(System.getProperty("jclouds.test.loopcount", "1000"));
    protected ExecutorService exec;
    protected Logger logger = Logger.NULL;;
@ -65,8 +65,7 @@ public abstract class BasePerformanceLiveTest extends BaseBlobStoreIntegrationTe
    }
    @Test
-   public void testPutBytesParallel() throws InterruptedException, ExecutionException,
-         TimeoutException {
+   public void testPutBytesParallel() throws InterruptedException, ExecutionException, TimeoutException {
       String bucketName = getContainerName();
       try {
          doParallel(new PutBytesFuture(bucketName), loopCount, bucketName);
@ -86,8 +85,7 @@ public abstract class BasePerformanceLiveTest extends BaseBlobStoreIntegrationTe
    }
    @Test
-   public void testPutFileParallel() throws InterruptedException, ExecutionException,
-         TimeoutException {
+   public void testPutFileParallel() throws InterruptedException, ExecutionException, TimeoutException {
       String bucketName = getContainerName();
       try {
          doParallel(new PutFileFuture(bucketName), loopCount, bucketName);
@ -107,8 +105,7 @@ public abstract class BasePerformanceLiveTest extends BaseBlobStoreIntegrationTe
    }
    @Test
-   public void testPutInputStreamParallel() throws InterruptedException, ExecutionException,
-         TimeoutException {
+   public void testPutInputStreamParallel() throws InterruptedException, ExecutionException, TimeoutException {
       String bucketName = getContainerName();
       try {
          doParallel(new PutInputStreamFuture(bucketName), loopCount, bucketName);
@ -128,8 +125,7 @@ public abstract class BasePerformanceLiveTest extends BaseBlobStoreIntegrationTe
    }
    @Test
-   public void testPutStringParallel() throws InterruptedException, ExecutionException,
-         TimeoutException {
+   public void testPutStringParallel() throws InterruptedException, ExecutionException, TimeoutException {
       String bucketName = getContainerName();
       try {
          doParallel(new PutStringFuture(bucketName), loopCount, bucketName);
@ -138,14 +134,13 @@ public abstract class BasePerformanceLiveTest extends BaseBlobStoreIntegrationTe
       }
    }
-   private void doSerial(Provider<ListenableFuture<?>> provider, int loopCount) throws Exception,
-         ExecutionException {
+   private void doSerial(Provider<ListenableFuture<?>> provider, int loopCount) throws Exception, ExecutionException {
       for (int i = 0; i < loopCount; i++)
          assert provider.get().get() != null;
    }
-   private void doParallel(Provider<ListenableFuture<?>> provider, int loopCount,
-         String containerName) throws InterruptedException, ExecutionException, TimeoutException {
+   private void doParallel(Provider<ListenableFuture<?>> provider, int loopCount, String containerName)
+         throws InterruptedException, ExecutionException, TimeoutException {
       Map<Integer, ListenableFuture<?>> responses = Maps.newHashMap();
       for (int i = 0; i < loopCount; i++)
          responses.put(i, provider.get());
@ -178,8 +173,7 @@ public abstract class BasePerformanceLiveTest extends BaseBlobStoreIntegrationTe
       }
       public ListenableFuture<?> get() {
-         return Futures.makeListenable(putFile(bucketName, key.getAndIncrement() + "", file,
-               "text/xml"));
+         return Futures.makeListenable(putFile(bucketName, key.getAndIncrement() + "", file, "text/xml"));
       }
    }
@ -195,8 +189,8 @@ public abstract class BasePerformanceLiveTest extends BaseBlobStoreIntegrationTe
       @Override
       public ListenableFuture<?> get() {
-         return Futures.makeListenable(putInputStream(bucketName, key.getAndIncrement() + "",
-               new ByteArrayInputStream(test), "application/octetstring"));
+         return Futures.makeListenable(putInputStream(bucketName, key.getAndIncrement() + "", new ByteArrayInputStream(
+               test), "application/octetstring"));
       }
    }
@ -211,18 +205,15 @@ public abstract class BasePerformanceLiveTest extends BaseBlobStoreIntegrationTe
       }
       public ListenableFuture<?> get() {
-         return Futures.makeListenable(putString(bucketName, key.getAndIncrement() + "",
-               testString, "text/plain"));
+         return Futures.makeListenable(putString(bucketName, key.getAndIncrement() + "", testString, "text/plain"));
       }
    }
-   protected abstract Future<?> putByteArray(String bucket, String key, byte[] data,
-         String contentType);
+   protected abstract Future<?> putByteArray(String bucket, String key, byte[] data, String contentType);
    protected abstract Future<?> putFile(String bucket, String key, File data, String contentType);
-   protected abstract Future<?> putInputStream(String bucket, String key, InputStream data,
-         String contentType);
+   protected abstract Future<?> putInputStream(String bucket, String key, InputStream data, String contentType);
    protected abstract Future<?> putString(String bucket, String key, String data, String contentType);


@ -28,6 +28,7 @@ import static org.jclouds.Constants.PROPERTY_USER_THREADS;
 import java.util.Properties;
 import java.util.concurrent.Executors;
+import org.jclouds.blobstore.BlobStoreContextFactory;
 import org.jclouds.enterprise.config.EnterpriseConfigurationModule;
 import org.jclouds.http.apachehc.config.ApacheHCHttpCommandExecutorServiceModule;
 import org.jclouds.logging.config.NullLoggingModule;
@ -35,15 +36,17 @@ import org.testng.ITestContext;
 import org.testng.annotations.BeforeClass;
 import org.testng.annotations.Test;
-@Test(sequential = true, testName = "perftest.JCloudsNioPerformanceLiveTest", groups = { "live" })
+import com.google.common.collect.ImmutableSet;
+@Test(sequential = true, testName = "perftest.JCloudsApacheHCPerformanceLiveTest", groups = { "live" })
 public class JCloudsApacheHCPerformanceLiveTest extends BaseJCloudsPerformanceLiveTest {
    @Override
    @BeforeClass(groups = { "integration", "live" })
    public void setUpResourcesOnThisThread(ITestContext testContext) throws Exception {
       exec = Executors.newCachedThreadPool();
-      String accesskeyid = System.getProperty("jclouds.test.user");
-      String secretkey = System.getProperty("jclouds.test.key");
+      String accesskeyid = System.getProperty("jclouds.test.identity");
+      String secretkey = System.getProperty("jclouds.test.credential");
       Properties overrides = new Properties();
       overrides.setProperty(PROPERTY_SO_TIMEOUT, 5000 + "");
       overrides.setProperty(PROPERTY_CONNECTION_TIMEOUT, 5000 + "");
@ -53,9 +56,9 @@ public class JCloudsApacheHCPerformanceLiveTest extends BaseJCloudsPerformanceLi
       overrides.setProperty(PROPERTY_USER_THREADS, 0 + "");
       String contextName = "enterprise";
       overrideWithSysPropertiesAndPrint(overrides, contextName);
-      context = S3ContextFactory.createContext(overrides, accesskeyid, secretkey,
-            new NullLoggingModule(), new ApacheHCHttpCommandExecutorServiceModule(),
-            new EnterpriseConfigurationModule());
+      context = new BlobStoreContextFactory().createContext("s3", accesskeyid, secretkey, ImmutableSet.of(
+            new NullLoggingModule(), new ApacheHCHttpCommandExecutorServiceModule(),
+            new EnterpriseConfigurationModule()), overrides);
    }
    @Override


@ -20,24 +20,23 @@ package org.jclouds.aws.s3;
 import java.io.File;
 import java.io.InputStream;
-import java.util.Map;
 import java.util.Properties;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.Future;
 import java.util.concurrent.TimeoutException;
 import org.jclouds.blobstore.BlobStoreContext;
-import org.jclouds.enterprise.config.EnterpriseConfigurationModule;
+import org.jclouds.blobstore.BlobStoreContextFactory;
 import org.jclouds.gae.config.GoogleAppEngineConfigurationModule;
 import org.jclouds.logging.config.NullLoggingModule;
 import org.testng.annotations.AfterClass;
 import org.testng.annotations.BeforeClass;
 import org.testng.annotations.BeforeMethod;
 import org.testng.annotations.Test;
-import org.testng.v6.Maps;
-import com.google.appengine.tools.development.ApiProxyLocalImpl;
-import com.google.apphosting.api.ApiProxy;
+import com.google.appengine.tools.development.testing.LocalServiceTestHelper;
+import com.google.appengine.tools.development.testing.LocalURLFetchServiceTestConfig;
+import com.google.common.collect.ImmutableSet;
 /**
  *
@ -50,8 +49,7 @@ public class JCloudsGaePerformanceLiveTest extends BaseJCloudsPerformanceLiveTes
    @Override
    @Test(enabled = false)
-   public void testPutBytesParallel() throws InterruptedException, ExecutionException,
-         TimeoutException {
+   public void testPutBytesParallel() throws InterruptedException, ExecutionException, TimeoutException {
       throw new UnsupportedOperationException();
    }
@ -63,8 +61,7 @@ public class JCloudsGaePerformanceLiveTest extends BaseJCloudsPerformanceLiveTes
    @Override
    @Test(enabled = false)
-   public void testPutFileParallel() throws InterruptedException, ExecutionException,
-         TimeoutException {
+   public void testPutFileParallel() throws InterruptedException, ExecutionException, TimeoutException {
       throw new UnsupportedOperationException();
    }
@ -76,8 +73,7 @@ public class JCloudsGaePerformanceLiveTest extends BaseJCloudsPerformanceLiveTes
    @Override
    @Test(enabled = false)
-   public void testPutInputStreamParallel() throws InterruptedException, ExecutionException,
-         TimeoutException {
+   public void testPutInputStreamParallel() throws InterruptedException, ExecutionException, TimeoutException {
       throw new UnsupportedOperationException();
    }
@ -89,8 +85,7 @@ public class JCloudsGaePerformanceLiveTest extends BaseJCloudsPerformanceLiveTes
    @Override
    @Test(enabled = false)
-   public void testPutStringParallel() throws InterruptedException, ExecutionException,
-         TimeoutException {
+   public void testPutStringParallel() throws InterruptedException, ExecutionException, TimeoutException {
       throw new UnsupportedOperationException();
    }
@ -103,7 +98,8 @@ public class JCloudsGaePerformanceLiveTest extends BaseJCloudsPerformanceLiveTes
    public JCloudsGaePerformanceLiveTest() {
       super();
       // otherwise, we'll get timeout errors
-      // TODO sdk 1.2.3 should give the ability to set a higher timeout then 5 seconds allowing this
+      // TODO sdk 1.2.3 should give the ability to set a higher timeout then 5
+      // seconds allowing this
       // to be removed
       loopCount = 5;
    }
@ -121,8 +117,7 @@ public class JCloudsGaePerformanceLiveTest extends BaseJCloudsPerformanceLiveTes
} }
@Override @Override
protected Future<?> putInputStream(String bucket, String key, InputStream data, protected Future<?> putInputStream(String bucket, String key, InputStream data, String contentType) {
String contentType) {
setupApiProxy(); setupApiProxy();
return super.putInputStream(bucket, key, data, contentType); return super.putInputStream(bucket, key, data, contentType);
} }
@ -135,64 +130,20 @@ public class JCloudsGaePerformanceLiveTest extends BaseJCloudsPerformanceLiveTes
@BeforeMethod @BeforeMethod
void setupApiProxy() { void setupApiProxy() {
ApiProxy.setEnvironmentForCurrentThread(new TestEnvironment()); new LocalServiceTestHelper(new LocalURLFetchServiceTestConfig()).setUp();
ApiProxy.setDelegate(new ApiProxyLocalImpl(new File(".")) {
});
}
class TestEnvironment implements ApiProxy.Environment {
public String getAppId() {
return "Unit Tests";
}
public String getVersionId() {
return "1.0";
}
public void setDefaultNamespace(String s) {
}
public String getRequestNamespace() {
return null;
}
public String getDefaultNamespace() {
return null;
}
public String getAuthDomain() {
return null;
}
public boolean isLoggedIn() {
return false;
}
public String getEmail() {
return null;
}
public boolean isAdmin() {
return false;
}
public Map<String, Object> getAttributes() {
return Maps.newHashMap();
}
} }
private BlobStoreContext perfContext; private BlobStoreContext perfContext;
@BeforeClass(groups = { "live" }) @BeforeClass(groups = { "live" })
void setup() { void setup() {
String accesskeyid = System.getProperty("jclouds.test.user"); String accesskeyid = System.getProperty("jclouds.test.identity");
String secretkey = System.getProperty("jclouds.test.key"); String secretkey = System.getProperty("jclouds.test.credential");
Properties overrides = new Properties(); Properties overrides = new Properties();
String contextName = "gae"; String contextName = "gae";
overrideWithSysPropertiesAndPrint(overrides, contextName); overrideWithSysPropertiesAndPrint(overrides, contextName);
this.perfContext = S3ContextFactory.createContext(overrides, accesskeyid, secretkey, context = new BlobStoreContextFactory().createContext("s3", accesskeyid, secretkey, ImmutableSet.of(
new NullLoggingModule(), new EnterpriseConfigurationModule(), new NullLoggingModule(), new GoogleAppEngineConfigurationModule()), overrides);
new GoogleAppEngineConfigurationModule());
} }
@AfterClass(groups = { "live" }) @AfterClass(groups = { "live" })


@ -1,61 +0,0 @@
/**
*
* Copyright (C) 2009 Cloud Conscious, LLC. <info@cloudconscious.com>
*
* ====================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ====================================================================
*/
package org.jclouds.aws.s3;
import static org.jclouds.Constants.PROPERTY_IO_WORKER_THREADS;
import static org.jclouds.Constants.PROPERTY_MAX_CONNECTIONS_PER_CONTEXT;
import static org.jclouds.Constants.PROPERTY_MAX_CONNECTIONS_PER_HOST;
import static org.jclouds.Constants.PROPERTY_USER_THREADS;
import java.util.Properties;
import java.util.concurrent.Executors;
import org.jclouds.enterprise.config.EnterpriseConfigurationModule;
import org.jclouds.http.httpnio.config.NioTransformingHttpCommandExecutorServiceModule;
import org.jclouds.logging.config.NullLoggingModule;
import org.testng.ITestContext;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
@Test(sequential = true, testName = "perftest.JCloudsNioPerformanceLiveTest", groups = { "live" })
public class JCloudsNioPerformanceLiveTest extends BaseJCloudsPerformanceLiveTest {
@Override
@BeforeClass(groups = { "integration", "live" })
public void setUpResourcesOnThisThread(ITestContext testContext) throws Exception {
exec = Executors.newCachedThreadPool();
String accesskeyid = System.getProperty("jclouds.test.user");
String secretkey = System.getProperty("jclouds.test.key");
Properties overrides = new Properties();
overrides.setProperty(PROPERTY_MAX_CONNECTIONS_PER_CONTEXT, 20 + "");
overrides.setProperty(PROPERTY_IO_WORKER_THREADS, 3 + "");
overrides.setProperty(PROPERTY_MAX_CONNECTIONS_PER_HOST, 12 + "");
overrides.setProperty(PROPERTY_USER_THREADS, 0 + "");
String contextName = "nio";
overrideWithSysPropertiesAndPrint(overrides, contextName);
context = S3ContextFactory.createContext(overrides, accesskeyid, secretkey,
new NullLoggingModule(), new EnterpriseConfigurationModule(),
new NioTransformingHttpCommandExecutorServiceModule());
}
@Override
public S3AsyncClient getApi() {
return (S3AsyncClient) context.getProviderSpecificContext().getAsyncApi();
}
}


@ -26,12 +26,15 @@ import static org.jclouds.Constants.PROPERTY_USER_THREADS;
import java.util.Properties; import java.util.Properties;
import java.util.concurrent.Executors; import java.util.concurrent.Executors;
import org.jclouds.blobstore.BlobStoreContextFactory;
import org.jclouds.enterprise.config.EnterpriseConfigurationModule; import org.jclouds.enterprise.config.EnterpriseConfigurationModule;
import org.jclouds.logging.config.NullLoggingModule; import org.jclouds.logging.config.NullLoggingModule;
import org.testng.ITestContext; import org.testng.ITestContext;
import org.testng.annotations.BeforeClass; import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test; import org.testng.annotations.Test;
import com.google.common.collect.ImmutableSet;
/** /**
* Tests the default JClouds client. * Tests the default JClouds client.
* *
@ -45,8 +48,8 @@ public class JCloudsPerformanceLiveTest extends BaseJCloudsPerformanceLiveTest {
@BeforeClass(groups = { "integration", "live" }) @BeforeClass(groups = { "integration", "live" })
public void setUpResourcesOnThisThread(ITestContext testContext) throws Exception { public void setUpResourcesOnThisThread(ITestContext testContext) throws Exception {
exec = Executors.newCachedThreadPool(); exec = Executors.newCachedThreadPool();
String accesskeyid = System.getProperty("jclouds.test.user"); String accesskeyid = System.getProperty("jclouds.test.identity");
String secretkey = System.getProperty("jclouds.test.key"); String secretkey = System.getProperty("jclouds.test.credential");
Properties overrides = new Properties(); Properties overrides = new Properties();
overrides.setProperty(PROPERTY_MAX_CONNECTIONS_PER_CONTEXT, 50 + ""); overrides.setProperty(PROPERTY_MAX_CONNECTIONS_PER_CONTEXT, 50 + "");
overrides.setProperty(PROPERTY_MAX_CONNECTIONS_PER_HOST, 30 + ""); overrides.setProperty(PROPERTY_MAX_CONNECTIONS_PER_HOST, 30 + "");
@ -54,8 +57,8 @@ public class JCloudsPerformanceLiveTest extends BaseJCloudsPerformanceLiveTest {
overrides.setProperty(PROPERTY_USER_THREADS, 0 + ""); overrides.setProperty(PROPERTY_USER_THREADS, 0 + "");
String contextName = "standard"; String contextName = "standard";
overrideWithSysPropertiesAndPrint(overrides, contextName); overrideWithSysPropertiesAndPrint(overrides, contextName);
context = S3ContextFactory.createContext(overrides, accesskeyid, secretkey, context = new BlobStoreContextFactory().createContext("s3", accesskeyid, secretkey, ImmutableSet.of(
new NullLoggingModule(), new EnterpriseConfigurationModule()); new NullLoggingModule(), new EnterpriseConfigurationModule()), overrides);
} }
@Override @Override
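The standard (non-GAE) performance test moves to the same BlobStoreContextFactory entry point while keeping the EnterpriseConfigurationModule, tuning the HTTP layer through the overrides visible in this hunk. A hedged sketch of that construction, using only the property values shown above (the helper class is illustrative, not part of the commit):

    import static org.jclouds.Constants.PROPERTY_MAX_CONNECTIONS_PER_CONTEXT;
    import static org.jclouds.Constants.PROPERTY_MAX_CONNECTIONS_PER_HOST;
    import static org.jclouds.Constants.PROPERTY_USER_THREADS;

    import java.util.Properties;
    import org.jclouds.blobstore.BlobStoreContext;
    import org.jclouds.blobstore.BlobStoreContextFactory;
    import org.jclouds.enterprise.config.EnterpriseConfigurationModule;
    import org.jclouds.logging.config.NullLoggingModule;
    import com.google.common.collect.ImmutableSet;

    final class StandardContextSketch {
       static BlobStoreContext create(String identity, String credential) {
          // connection and thread limits copied from the hunk above
          Properties overrides = new Properties();
          overrides.setProperty(PROPERTY_MAX_CONNECTIONS_PER_CONTEXT, "50");
          overrides.setProperty(PROPERTY_MAX_CONNECTIONS_PER_HOST, "30");
          overrides.setProperty(PROPERTY_USER_THREADS, "0");
          // same module set as the hunk above; the httpnio module from the deleted test is gone
          return new BlobStoreContextFactory().createContext("s3", identity, credential,
                ImmutableSet.of(new NullLoggingModule(), new EnterpriseConfigurationModule()), overrides);
       }
    }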


@ -69,10 +69,10 @@ public class Jets3tPerformanceLiveTest extends BasePerformanceLiveTest {
@BeforeClass(groups = { "live" }, dependsOnMethods = "setUpResourcesOnThisThread") @BeforeClass(groups = { "live" }, dependsOnMethods = "setUpResourcesOnThisThread")
protected void createLiveS3Context(ITestContext testContext) throws S3ServiceException { protected void createLiveS3Context(ITestContext testContext) throws S3ServiceException {
if (testContext.getAttribute("jclouds.test.user") != null) { if (testContext.getAttribute("jclouds.test.identity") != null) {
AWSCredentials credentials = new AWSCredentials((String) testContext AWSCredentials credentials = new AWSCredentials((String) testContext
.getAttribute("jclouds.test.user"), (String) testContext .getAttribute("jclouds.test.identity"), (String) testContext
.getAttribute("jclouds.test.key")); .getAttribute("jclouds.test.credential"));
jetClient = new RestS3Service(credentials); jetClient = new RestS3Service(credentials);
} else { } else {
throw new RuntimeException("not configured properly"); throw new RuntimeException("not configured properly");
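For the jets3t baseline only the property names change: jclouds.test.identity and jclouds.test.credential replace the old jclouds.test.user and jclouds.test.key. A minimal standalone sketch of the equivalent jets3t client construction (the class and method names are illustrative; the live test pulls the values from the TestNG ITestContext rather than system properties):

    import org.jets3t.service.S3ServiceException;
    import org.jets3t.service.impl.rest.httpclient.RestS3Service;
    import org.jets3t.service.security.AWSCredentials;

    final class Jets3tClientSketch {
       static RestS3Service connect() throws S3ServiceException {
          String identity = System.getProperty("jclouds.test.identity");
          String credential = System.getProperty("jclouds.test.credential");
          if (identity == null || credential == null)
             throw new RuntimeException("not configured properly"); // mirrors the check in the test
          return new RestS3Service(new AWSCredentials(identity, credential));
       }
    }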