From ab33d54fe3fa16579d1ebbc18ca7051ef1d12756 Mon Sep 17 00:00:00 2001
From: Colin McCabe
Date: Wed, 6 Nov 2013 07:57:01 +0000
Subject: [PATCH] HADOOP-9623. Update jets3t dependency to 0.9.0 (Amandeep Khurana via Colin Patrick McCabe)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1539253 13f79535-47bb-0310-9956-ffa450edef68
---
 .../hadoop-common/CHANGES.txt                   |   3 +
 .../hadoop/fs/s3/Jets3tFileSystemStore.java     |  32 ++-
 .../apache/hadoop/fs/s3/MigrationTool.java      |   9 +-
 .../s3native/Jets3tNativeFileSystemStore.java   | 126 ++++++++++++++----
 .../fs/s3native/NativeS3FileSystem.java         |   6 +-
 hadoop-project/pom.xml                          |   7 +-
 6 files changed, 141 insertions(+), 42 deletions(-)

diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index 4cc9939aac8..5454c6b5c2d 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -105,6 +105,9 @@ Trunk (Unreleased)
 
     HADOOP-9833 move slf4j to version 1.7.5 (Kousuke Saruta via stevel)
 
+    HADOOP-9623 Update jets3t dependency to 0.9.0. (Amandeep Khurana via Colin
+    Patrick McCabe)
+
   BUG FIXES
 
     HADOOP-9451. Fault single-layer config if node group topology is enabled.
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/Jets3tFileSystemStore.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/Jets3tFileSystemStore.java
index 60a548eeefa..241ec0f3277 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/Jets3tFileSystemStore.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/Jets3tFileSystemStore.java
@@ -42,6 +42,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.s3.INode.FileType;
 import org.jets3t.service.S3Service;
 import org.jets3t.service.S3ServiceException;
+import org.jets3t.service.ServiceException;
 import org.jets3t.service.impl.rest.httpclient.RestS3Service;
 import org.jets3t.service.model.S3Bucket;
 import org.jets3t.service.model.S3Object;
@@ -60,8 +61,8 @@ class Jets3tFileSystemStore implements FileSystemStore {
   private static final String FILE_SYSTEM_VERSION_NAME = "fs-version";
   private static final String FILE_SYSTEM_VERSION_VALUE = "1";
 
-  private static final Map METADATA =
-    new HashMap();
+  private static final Map METADATA =
+    new HashMap();
 
   static {
     METADATA.put(FILE_SYSTEM_NAME, FILE_SYSTEM_VALUE);
@@ -165,7 +166,7 @@ class Jets3tFileSystemStore implements FileSystemStore {
       throws IOException {
 
     try {
-      S3Object object = s3Service.getObject(bucket, key);
+      S3Object object = s3Service.getObject(bucket.getName(), key);
       if (checkMetadata) {
         checkMetadata(object);
       }
@@ -178,6 +179,9 @@ class Jets3tFileSystemStore implements FileSystemStore {
         throw (IOException) e.getCause();
       }
       throw new S3Exception(e);
+    } catch (ServiceException e) {
+      handleServiceException(e);
+      return null;
     }
   }
 
@@ -194,6 +198,9 @@ class Jets3tFileSystemStore implements FileSystemStore {
         throw (IOException) e.getCause();
       }
       throw new S3Exception(e);
+    } catch (ServiceException e) {
+      handleServiceException(e);
+      return null;
     }
   }
 
@@ -276,7 +283,7 @@ class Jets3tFileSystemStore implements FileSystemStore {
       if (!prefix.endsWith(PATH_DELIMITER)) {
         prefix += PATH_DELIMITER;
       }
-      S3Object[] objects = s3Service.listObjects(bucket, prefix, PATH_DELIMITER);
+      S3Object[] objects = s3Service.listObjects(bucket.getName(), prefix, PATH_DELIMITER);
       Set prefixes = new TreeSet();
       for (int i = 0; i < objects.length; i++) {
         prefixes.add(keyToPath(objects[i].getKey()));
@@ -298,7 +305,7 @@ class Jets3tFileSystemStore implements FileSystemStore {
       if (!prefix.endsWith(PATH_DELIMITER)) {
         prefix += PATH_DELIMITER;
       }
-      S3Object[] objects = s3Service.listObjects(bucket, prefix, null);
+      S3Object[] objects = s3Service.listObjects(bucket.getName(), prefix, null);
       Set prefixes = new TreeSet();
       for (int i = 0; i < objects.length; i++) {
         prefixes.add(keyToPath(objects[i].getKey()));
@@ -385,7 +392,7 @@ class Jets3tFileSystemStore implements FileSystemStore {
   @Override
   public void purge() throws IOException {
     try {
-      S3Object[] objects = s3Service.listObjects(bucket);
+      S3Object[] objects = s3Service.listObjects(bucket.getName());
       for (int i = 0; i < objects.length; i++) {
         s3Service.deleteObject(bucket, objects[i].getKey());
       }
@@ -402,7 +409,7 @@ class Jets3tFileSystemStore implements FileSystemStore {
     StringBuilder sb = new StringBuilder("S3 Filesystem, ");
     sb.append(bucket.getName()).append("\n");
     try {
-      S3Object[] objects = s3Service.listObjects(bucket, PATH_DELIMITER, null);
+      S3Object[] objects = s3Service.listObjects(bucket.getName(), PATH_DELIMITER, null);
       for (int i = 0; i < objects.length; i++) {
         Path path = keyToPath(objects[i].getKey());
         sb.append(path).append("\n");
@@ -424,4 +431,15 @@ class Jets3tFileSystemStore implements FileSystemStore {
     System.out.println(sb);
   }
 
+  private void handleServiceException(ServiceException e) throws IOException {
+    if (e.getCause() instanceof IOException) {
+      throw (IOException) e.getCause();
+    }
+    else {
+      if(LOG.isDebugEnabled()) {
+        LOG.debug("Got ServiceException with Error code: " + e.getErrorCode() + ";and Error message: " + e.getErrorMessage());
+      }
+    }
+  }
+
 }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/MigrationTool.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/MigrationTool.java
index 416bfb17c46..429c272e53a 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/MigrationTool.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/MigrationTool.java
@@ -34,6 +34,7 @@ import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 import org.jets3t.service.S3Service;
 import org.jets3t.service.S3ServiceException;
+import org.jets3t.service.ServiceException;
 import org.jets3t.service.impl.rest.httpclient.RestS3Service;
 import org.jets3t.service.model.S3Bucket;
 import org.jets3t.service.model.S3Object;
@@ -177,7 +178,7 @@ public class MigrationTool extends Configured implements Tool {
 
   private S3Object get(String key) {
     try {
-      return s3Service.getObject(bucket, key);
+      return s3Service.getObject(bucket.getName(), key);
     } catch (S3ServiceException e) {
       if ("NoSuchKey".equals(e.getS3ErrorCode())) {
         return null;
@@ -200,7 +201,7 @@ public class MigrationTool extends Configured implements Tool {
     public Set listAllPaths() throws IOException {
       try {
         String prefix = urlEncode(Path.SEPARATOR);
-        S3Object[] objects = s3Service.listObjects(bucket, prefix, null);
+        S3Object[] objects = s3Service.listObjects(bucket.getName(), prefix, null);
         Set prefixes = new TreeSet();
         for (int i = 0; i < objects.length; i++) {
           prefixes.add(keyToPath(objects[i].getKey()));
@@ -237,7 +238,7 @@ public class MigrationTool extends Configured implements Tool {
 
     private InputStream get(String key) throws IOException {
       try {
-        S3Object object = s3Service.getObject(bucket, key);
+        S3Object object = s3Service.getObject(bucket.getName(), key);
         return object.getDataInputStream();
       } catch (S3ServiceException e) {
         if ("NoSuchKey".equals(e.getS3ErrorCode())) {
@@ -247,6 +248,8 @@ public class MigrationTool extends Configured implements Tool {
           throw (IOException) e.getCause();
         }
         throw new S3Exception(e);
+      } catch (ServiceException e) {
+        return null;
       }
     }
 
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/Jets3tNativeFileSystemStore.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/Jets3tNativeFileSystemStore.java
index 400419c110b..e05ed09f586 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/Jets3tNativeFileSystemStore.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/Jets3tNativeFileSystemStore.java
@@ -29,17 +29,21 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.net.URI;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.s3.S3Credentials;
 import org.apache.hadoop.fs.s3.S3Exception;
-import org.jets3t.service.S3ObjectsChunk;
 import org.jets3t.service.S3Service;
 import org.jets3t.service.S3ServiceException;
+import org.jets3t.service.ServiceException;
+import org.jets3t.service.StorageObjectsChunk;
 import org.jets3t.service.impl.rest.httpclient.RestS3Service;
 import org.jets3t.service.model.S3Bucket;
 import org.jets3t.service.model.S3Object;
+import org.jets3t.service.model.StorageObject;
 import org.jets3t.service.security.AWSCredentials;
 
 @InterfaceAudience.Private
@@ -48,7 +52,9 @@ class Jets3tNativeFileSystemStore implements NativeFileSystemStore {
 
   private S3Service s3Service;
   private S3Bucket bucket;
-  
+  public static final Log LOG =
+      LogFactory.getLog(Jets3tNativeFileSystemStore.class);
+
   @Override
   public void initialize(URI uri, Configuration conf) throws IOException {
     S3Credentials s3Credentials = new S3Credentials();
@@ -59,7 +65,7 @@ class Jets3tNativeFileSystemStore implements NativeFileSystemStore {
           s3Credentials.getSecretAccessKey());
       this.s3Service = new RestS3Service(awsCredentials);
     } catch (S3ServiceException e) {
-      handleServiceException(e);
+      handleS3ServiceException(e);
     }
     bucket = new S3Bucket(uri.getHost());
   }
@@ -80,7 +86,7 @@ class Jets3tNativeFileSystemStore implements NativeFileSystemStore {
       }
       s3Service.putObject(bucket, object);
     } catch (S3ServiceException e) {
-      handleServiceException(e);
+      handleS3ServiceException(e);
     } finally {
       if (in != null) {
         try {
@@ -101,53 +107,85 @@ class Jets3tNativeFileSystemStore implements NativeFileSystemStore {
       object.setContentLength(0);
       s3Service.putObject(bucket, object);
     } catch (S3ServiceException e) {
-      handleServiceException(e);
+      handleS3ServiceException(e);
     }
   }
 
   @Override
   public FileMetadata retrieveMetadata(String key) throws IOException {
     try {
-      S3Object object = s3Service.getObjectDetails(bucket, key);
+      if(LOG.isDebugEnabled()) {
+        LOG.debug("Getting metadata for key: " + key + " from bucket:" + bucket.getName());
+      }
+      S3Object object = s3Service.getObject(bucket.getName(), key);
       return new FileMetadata(key, object.getContentLength(),
           object.getLastModifiedDate().getTime());
     } catch (S3ServiceException e) {
       // Following is brittle. Is there a better way?
-      if (e.getMessage().contains("ResponseCode=404")) {
-        return null;
+      if (e.getS3ErrorCode().matches("NoSuchKey")) {
+        return null; //return null if key not found
       }
-      handleServiceException(e);
+      handleS3ServiceException(e);
       return null; //never returned - keep compiler happy
     }
   }
-  
+
+  /**
+   * @param key
+   * The key is the object name that is being retrieved from the S3 bucket
+   * @return
+   * This method returns null if the key is not found
+   * @throws IOException
+   */
+  @Override
   public InputStream retrieve(String key) throws IOException {
     try {
-      S3Object object = s3Service.getObject(bucket, key);
+      if(LOG.isDebugEnabled()) {
+        LOG.debug("Getting key: " + key + " from bucket:" + bucket.getName());
+      }
+      S3Object object = s3Service.getObject(bucket.getName(), key);
       return object.getDataInputStream();
     } catch (S3ServiceException e) {
-      handleServiceException(key, e);
+      handleS3ServiceException(key, e);
       return null; //never returned - keep compiler happy
+    } catch (ServiceException e) {
+      handleServiceException(e);
+      return null; //return null if key not found
     }
   }
-  
+
+  /**
+   *
+   * @param key
+   * The key is the object name that is being retrieved from the S3 bucket
+   * @return
+   * This method returns null if the key is not found
+   * @throws IOException
+   */
+  @Override
   public InputStream retrieve(String key, long byteRangeStart)
-      throws IOException {
+      throws IOException {
     try {
+      if(LOG.isDebugEnabled()) {
+        LOG.debug("Getting key: " + key + " from bucket:" + bucket.getName() + " with byteRangeStart: " + byteRangeStart);
+      }
       S3Object object = s3Service.getObject(bucket, key, null, null, null,
                                             null, byteRangeStart, null);
       return object.getDataInputStream();
     } catch (S3ServiceException e) {
-      handleServiceException(key, e);
+      handleS3ServiceException(key, e);
       return null; //never returned - keep compiler happy
+    } catch (ServiceException e) {
+      handleServiceException(e);
+      return null; //return null if key not found
     }
   }
 
   @Override
   public PartialListing list(String prefix, int maxListingLength)
-      throws IOException {
+      throws IOException {
     return list(prefix, maxListingLength, null, false);
   }
 
@@ -158,6 +196,13 @@ class Jets3tNativeFileSystemStore implements NativeFileSystemStore {
     return list(prefix, recurse ? null : PATH_DELIMITER,
         maxListingLength, priorLastKey);
   }
 
+  /**
+   *
+   * @return
+   * This method returns null if the list could not be populated
+   * due to S3 giving ServiceException
+   * @throws IOException
+   */
   private PartialListing list(String prefix, String delimiter,
       int maxListingLength, String priorLastKey) throws IOException {
@@ -165,52 +210,63 @@ class Jets3tNativeFileSystemStore implements NativeFileSystemStore {
       if (prefix.length() > 0 && !prefix.endsWith(PATH_DELIMITER)) {
         prefix += PATH_DELIMITER;
       }
-      S3ObjectsChunk chunk = s3Service.listObjectsChunked(bucket.getName(),
+      StorageObjectsChunk chunk = s3Service.listObjectsChunked(bucket.getName(),
           prefix, delimiter, maxListingLength, priorLastKey);
 
       FileMetadata[] fileMetadata =
         new FileMetadata[chunk.getObjects().length];
       for (int i = 0; i < fileMetadata.length; i++) {
-        S3Object object = chunk.getObjects()[i];
+        StorageObject object = chunk.getObjects()[i];
         fileMetadata[i] = new FileMetadata(object.getKey(),
             object.getContentLength(), object.getLastModifiedDate().getTime());
       }
       return new PartialListing(chunk.getPriorLastKey(), fileMetadata,
           chunk.getCommonPrefixes());
     } catch (S3ServiceException e) {
-      handleServiceException(e);
+      handleS3ServiceException(e);
       return null; //never returned - keep compiler happy
+    } catch (ServiceException e) {
+      handleServiceException(e);
+      return null; //return null if list could not be populated
     }
   }
 
   @Override
   public void delete(String key) throws IOException {
     try {
+      if(LOG.isDebugEnabled()) {
+        LOG.debug("Deleting key:" + key + "from bucket" + bucket.getName());
+      }
       s3Service.deleteObject(bucket, key);
     } catch (S3ServiceException e) {
-      handleServiceException(key, e);
+      handleS3ServiceException(key, e);
     }
   }
 
   @Override
   public void copy(String srcKey, String dstKey) throws IOException {
     try {
+      if(LOG.isDebugEnabled()) {
+        LOG.debug("Copying srcKey: " + srcKey + "to dstKey: " + dstKey + "in bucket: " + bucket.getName());
+      }
       s3Service.copyObject(bucket.getName(), srcKey, bucket.getName(),
           new S3Object(dstKey), false);
     } catch (S3ServiceException e) {
-      handleServiceException(srcKey, e);
+      handleS3ServiceException(srcKey, e);
+    } catch (ServiceException e) {
+      handleServiceException(e);
     }
   }
 
   @Override
   public void purge(String prefix) throws IOException {
     try {
-      S3Object[] objects = s3Service.listObjects(bucket, prefix, null);
+      S3Object[] objects = s3Service.listObjects(bucket.getName(), prefix, null);
       for (S3Object object : objects) {
         s3Service.deleteObject(bucket, object.getKey());
       }
     } catch (S3ServiceException e) {
-      handleServiceException(e);
+      handleS3ServiceException(e);
     }
   }
 
@@ -219,30 +275,44 @@ class Jets3tNativeFileSystemStore implements NativeFileSystemStore {
     StringBuilder sb = new StringBuilder("S3 Native Filesystem, ");
     sb.append(bucket.getName()).append("\n");
     try {
-      S3Object[] objects = s3Service.listObjects(bucket);
+      S3Object[] objects = s3Service.listObjects(bucket.getName());
       for (S3Object object : objects) {
         sb.append(object.getKey()).append("\n");
       }
     } catch (S3ServiceException e) {
-      handleServiceException(e);
+      handleS3ServiceException(e);
     }
     System.out.println(sb);
   }
 
-  private void handleServiceException(String key, S3ServiceException e) throws IOException {
+  private void handleS3ServiceException(String key, S3ServiceException e) throws IOException {
     if ("NoSuchKey".equals(e.getS3ErrorCode())) {
       throw new FileNotFoundException("Key '" + key + "' does not exist in S3");
     } else {
-      handleServiceException(e);
+      handleS3ServiceException(e);
    }
   }
 
-  private void handleServiceException(S3ServiceException e) throws IOException {
+  private void handleS3ServiceException(S3ServiceException e) throws IOException {
     if (e.getCause() instanceof IOException) {
       throw (IOException) e.getCause();
     }
     else {
+      if(LOG.isDebugEnabled()) {
+        LOG.debug("S3 Error code: " + e.getS3ErrorCode() + "; S3 Error message: " + e.getS3ErrorMessage());
+      }
       throw new S3Exception(e);
     }
   }
+
+  private void handleServiceException(ServiceException e) throws IOException {
+    if (e.getCause() instanceof IOException) {
+      throw (IOException) e.getCause();
+    }
+    else {
+      if(LOG.isDebugEnabled()) {
+        LOG.debug("Got ServiceException with Error code: " + e.getErrorCode() + ";and Error message: " + e.getErrorMessage());
+      }
+    }
+  }
 }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java
index 276d6e5fdb8..191baaff410 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java
@@ -273,7 +273,7 @@ public class NativeS3FileSystem extends FileSystem {
     setConf(conf);
     this.uri = URI.create(uri.getScheme() + "://" + uri.getAuthority());
     this.workingDir =
-      new Path("/user", System.getProperty("user.name")).makeQualified(this);
+      new Path("/user", System.getProperty("user.name")).makeQualified(this.uri, this.getWorkingDirectory());
   }
 
   private static NativeFileSystemStore createDefaultStore(Configuration conf) {
@@ -511,11 +511,11 @@ public class NativeS3FileSystem extends FileSystem {
 
   private FileStatus newFile(FileMetadata meta, Path path) {
     return new FileStatus(meta.getLength(), false, 1, getDefaultBlockSize(),
-        meta.getLastModified(), path.makeQualified(this));
+        meta.getLastModified(), path.makeQualified(this.getUri(), this.getWorkingDirectory()));
   }
 
   private FileStatus newDirectory(Path path) {
-    return new FileStatus(0, true, 1, 0, 0, path.makeQualified(this));
+    return new FileStatus(0, true, 1, 0, 0, path.makeQualified(this.getUri(), this.getWorkingDirectory()));
   }
 
   @Override
diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml
index c8e62af2a76..a531e543bf6 100644
--- a/hadoop-project/pom.xml
+++ b/hadoop-project/pom.xml
@@ -347,6 +347,11 @@
         <artifactId>httpclient</artifactId>
         <version>4.2.5</version>
       </dependency>
+      <dependency>
+        <groupId>org.apache.httpcomponents</groupId>
+        <artifactId>httpcore</artifactId>
+        <version>4.2.5</version>
+      </dependency>
       <dependency>
         <groupId>commons-codec</groupId>
         <artifactId>commons-codec</artifactId>
@@ -554,7 +559,7 @@
       <dependency>
        <groupId>net.java.dev.jets3t</groupId>
        <artifactId>jets3t</artifactId>
-       <version>0.6.1</version>
+       <version>0.9.0</version>
      </dependency>
      <dependency>
        <groupId>org.apache.mina</groupId>