mirror of https://github.com/apache/druid.git
Merge branch 'master' of github.com:metamx/druid
commit 4e2f6380e6

build.sh
@@ -10,8 +10,6 @@ SCRIPT_DIR=`pwd`
 popd
 
 VERSION=`cat pom.xml | grep version | head -4 | tail -1 | sed 's_.*<version>\([^<]*\)</version>.*_\1_'`
-#TAR_FILE=${SCRIPT_DIR}/${PROJECT}-${VERSION}.tar.gz
-#rm -f ${TAR_FILE}
 
 echo Using Version[${VERSION}]
 
S3Utils.java

@@ -19,15 +19,14 @@
 
 package com.metamx.druid.common.s3;
 
+import com.google.common.base.Throwables;
 import com.metamx.common.logger.Logger;
-import org.jets3t.service.S3ServiceException;
-import org.jets3t.service.impl.rest.httpclient.RestS3Service;
-import org.jets3t.service.model.S3Bucket;
+import org.jets3t.service.ServiceException;
 import org.jets3t.service.model.S3Object;
 
-import java.io.File;
 import java.io.IOException;
-import java.security.NoSuchAlgorithmException;
+import java.util.Random;
+import java.util.concurrent.Callable;
 
 /**
  *
@@ -36,37 +35,6 @@ public class S3Utils
 {
   private static final Logger log = new Logger(S3Utils.class);
 
-  public static void putFileToS3(
-      File localFile, RestS3Service s3Client, String outputS3Bucket, String outputS3Path
-  )
-      throws S3ServiceException, IOException, NoSuchAlgorithmException
-  {
-    S3Object s3Obj = new S3Object(localFile);
-    s3Obj.setBucketName(outputS3Bucket);
-    s3Obj.setKey(outputS3Path);
-
-    log.info("Uploading file[%s] to [s3://%s/%s]", localFile, s3Obj.getBucketName(), s3Obj.getKey());
-    s3Client.putObject(new S3Bucket(outputS3Bucket), s3Obj);
-  }
-
-  public static void putFileToS3WrapExceptions(
-      File localFile, RestS3Service s3Client, String outputS3Bucket, String outputS3Path
-  )
-  {
-    try {
-      putFileToS3(localFile, s3Client, outputS3Bucket, outputS3Path);
-    }
-    catch (S3ServiceException e) {
-      throw new RuntimeException(e);
-    }
-    catch (IOException e) {
-      throw new RuntimeException(e);
-    }
-    catch (NoSuchAlgorithmException e) {
-      throw new RuntimeException(e);
-    }
-  }
-
   public static void closeStreamsQuietly(S3Object s3Obj)
   {
     if (s3Obj == null) {
@@ -80,4 +48,52 @@ public class S3Utils
     }
   }
 
+  /**
+   * Retries S3 operations that fail due to io-related exceptions. Service-level exceptions (access denied, file not
+   * found, etc) are not retried.
+   */
+  public static <T> T retryS3Operation(Callable<T> f) throws ServiceException, InterruptedException
+  {
+    int nTry = 0;
+    final int maxTries = 3;
+    while (true) {
+      try {
+        nTry++;
+        return f.call();
+      }
+      catch (IOException e) {
+        if (nTry <= maxTries) {
+          awaitNextRetry(e, nTry);
+        } else {
+          throw Throwables.propagate(e);
+        }
+      }
+      catch (ServiceException e) {
+        if (nTry <= maxTries &&
+            (e.getCause() instanceof IOException ||
+             (e.getErrorCode() != null && e.getErrorCode().equals("RequestTimeout")))) {
+          awaitNextRetry(e, nTry);
+        } else {
+          throw e;
+        }
+      }
+      catch (Exception e) {
+        throw Throwables.propagate(e);
+      }
+    }
+  }
+
+  private static void awaitNextRetry(Exception e, int nTry) throws InterruptedException
+  {
+    final long baseSleepMillis = 1000;
+    final double fuzziness = 0.2;
+    final long sleepMillis = Math.max(
+        baseSleepMillis,
+        (long) (baseSleepMillis * Math.pow(2, nTry) *
+                (1 + new Random().nextGaussian() * fuzziness))
+    );
+    log.info(e, "S3 fail on try %d, retrying in %,dms.", nTry, sleepMillis);
+    Thread.sleep(sleepMillis);
+  }
 }
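
awaitNextRetry sleeps for roughly 1000 * 2^nTry milliseconds with about ±20% Gaussian jitter, floored at one second, so the nominal waits across the three retries are about 2s, 4s, and 8s. A minimal sketch of that schedule with the jitter term left out (the class name here is illustrative, not part of the commit):

public class BackoffScheduleSketch
{
  public static void main(String[] args)
  {
    final long baseSleepMillis = 1000;
    final int maxTries = 3;
    for (int nTry = 1; nTry <= maxTries; nTry++) {
      // Same shape as awaitNextRetry, minus the (1 + gaussian * 0.2) fuzziness factor.
      final long sleepMillis = Math.max(baseSleepMillis, (long) (baseSleepMillis * Math.pow(2, nTry)));
      System.out.printf("try %d -> sleep %,d ms%n", nTry, sleepMillis);
    }
  }
}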
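
Callers drive the new helper by wrapping each jets3t call in a Callable, as the S3DataSegmentPuller changes below do. A rough usage sketch (the class, bucket, and key names are hypothetical; only retryS3Operation and the String-based RestS3Service.getObject call come from this commit):

import com.metamx.druid.common.s3.S3Utils;
import org.jets3t.service.impl.rest.httpclient.RestS3Service;
import org.jets3t.service.model.S3Object;

import java.util.concurrent.Callable;

public class RetryUsageSketch
{
  // Fetches an object, retrying transient IO failures and "RequestTimeout"
  // service errors up to three times with exponential backoff; other
  // ServiceExceptions (access denied, missing key, etc.) are rethrown as-is.
  public static S3Object fetch(final RestS3Service s3Client) throws Exception
  {
    return S3Utils.retryS3Operation(
        new Callable<S3Object>()
        {
          @Override
          public S3Object call() throws Exception
          {
            return s3Client.getObject("example-bucket", "example/key");
          }
        }
    );
  }
}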

S3DataSegmentPuller.java

@@ -19,6 +19,7 @@
 
 package com.metamx.druid.loading;
 
+import com.google.common.base.Throwables;
 import com.google.common.io.ByteStreams;
 import com.google.common.io.Closeables;
 import com.google.common.io.Files;
@@ -30,16 +31,17 @@ import com.metamx.druid.client.DataSegment;
 import com.metamx.druid.common.s3.S3Utils;
 import com.metamx.druid.utils.CompressionUtils;
 import org.apache.commons.io.FileUtils;
-import org.jets3t.service.S3ServiceException;
 import org.jets3t.service.ServiceException;
 import org.jets3t.service.impl.rest.httpclient.RestS3Service;
-import org.jets3t.service.model.S3Bucket;
 import org.jets3t.service.model.S3Object;
+import org.jets3t.service.model.StorageObject;
 
 import java.io.File;
 import java.io.IOException;
 import java.io.InputStream;
 import java.util.Map;
+import java.util.Random;
+import java.util.concurrent.Callable;
 import java.util.zip.GZIPInputStream;
 
 /**
@@ -62,9 +64,9 @@ public class S3DataSegmentPuller implements DataSegmentPuller
   }
 
   @Override
-  public void getSegmentFiles(DataSegment segment, File outDir) throws SegmentLoadingException
+  public void getSegmentFiles(final DataSegment segment, final File outDir) throws SegmentLoadingException
   {
-    S3Coords s3Coords = new S3Coords(segment);
+    final S3Coords s3Coords = new S3Coords(segment);
 
     log.info("Pulling index at path[%s] to outDir[%s]", s3Coords, outDir);
 
@@ -80,11 +82,18 @@ public class S3DataSegmentPuller implements DataSegmentPuller
       throw new ISE("outDir[%s] must be a directory.", outDir);
     }
 
+    try {
+      S3Utils.retryS3Operation(
+          new Callable<Void>()
+          {
+            @Override
+            public Void call() throws Exception
+            {
     long startTime = System.currentTimeMillis();
     S3Object s3Obj = null;
 
     try {
-      s3Obj = s3Client.getObject(new S3Bucket(s3Coords.bucket), s3Coords.path);
+      s3Obj = s3Client.getObject(s3Coords.bucket, s3Coords.path);
 
       InputStream in = null;
       try {
@@ -99,22 +108,26 @@ public class S3DataSegmentPuller implements DataSegmentPuller
           ByteStreams.copy(in, Files.newOutputStreamSupplier(new File(outDir, toFilename(key, ""))));
         }
         log.info("Pull of file[%s] completed in %,d millis", s3Obj, System.currentTimeMillis() - startTime);
+        return null;
       }
       catch (IOException e) {
         FileUtils.deleteDirectory(outDir);
-        throw new SegmentLoadingException(e, "Problem decompressing object[%s]", s3Obj);
+        throw new IOException(String.format("Problem decompressing object[%s]", s3Obj), e);
       }
       finally {
        Closeables.closeQuietly(in);
       }
     }
-    catch (Exception e) {
-      throw new SegmentLoadingException(e, e.getMessage());
-    }
     finally {
       S3Utils.closeStreamsQuietly(s3Obj);
     }
+            }
+          }
+      );
+    }
+    catch (Exception e) {
+      throw new SegmentLoadingException(e, e.getMessage());
+    }
   }
 
   private String toFilename(String key, final String suffix)
@@ -124,11 +137,23 @@ public class S3DataSegmentPuller implements DataSegmentPuller
     return filename;
   }
 
-  private boolean isObjectInBucket(S3Coords coords) throws SegmentLoadingException
+  private boolean isObjectInBucket(final S3Coords coords) throws SegmentLoadingException
   {
     try {
+      return S3Utils.retryS3Operation(
+          new Callable<Boolean>()
+          {
+            @Override
+            public Boolean call() throws Exception
+            {
       return s3Client.isObjectInBucket(coords.bucket, coords.path);
     }
+          }
+      );
+    }
+    catch (InterruptedException e) {
+      throw Throwables.propagate(e);
+    }
     catch (ServiceException e) {
       throw new SegmentLoadingException(e, "S3 fail! Key[%s]", coords);
     }
@@ -137,12 +162,24 @@ public class S3DataSegmentPuller implements DataSegmentPuller
   @Override
   public long getLastModified(DataSegment segment) throws SegmentLoadingException
   {
-    S3Coords coords = new S3Coords(segment);
+    final S3Coords coords = new S3Coords(segment);
     try {
-      S3Object objDetails = s3Client.getObjectDetails(new S3Bucket(coords.bucket), coords.path);
+      final StorageObject objDetails = S3Utils.retryS3Operation(
+          new Callable<StorageObject>()
+          {
+            @Override
+            public StorageObject call() throws Exception
+            {
+              return s3Client.getObjectDetails(coords.bucket, coords.path);
+            }
+          }
+      );
       return objDetails.getLastModifiedDate().getTime();
     }
-    catch (S3ServiceException e) {
+    catch (InterruptedException e) {
+      throw Throwables.propagate(e);
+    }
+    catch (ServiceException e) {
       throw new SegmentLoadingException(e, e.getMessage());
     }
   }
@@ -3,4 +3,4 @@
 #
 # Script to upload tarball of assembly build to static.druid.io for serving
 #
-s3cmd put services/target/druid-services-*-bin.tar.gz s3://static.druid.io/artifacts/
+s3cmd put services/target/druid-services-*-bin.tar.gz s3://static.druid.io/artifacts/releases