mirror of https://github.com/apache/druid.git
consolidate path functions
parent a417cd5df2
commit cd7a941f83
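
This commit extracts the descriptor-path computation that was previously repeated in S3DataSegmentKiller, S3DataSegmentMover, and S3DataSegmentPusher into a single helper on S3Utils. In caller terms the change looks like the following (a sketch assembled from lines in the diff below, not additional code from the commit):

    // before: each caller rebuilt the descriptor key by hand
    String s3DescriptorPath = s3Path.substring(0, s3Path.lastIndexOf("/")) + "/descriptor.json";

    // after: every caller delegates to the shared helper
    String s3DescriptorPath = S3Utils.descriptorPathForSegmentPath(s3Path);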
@@ -27,7 +27,6 @@ import io.druid.segment.loading.SegmentLoadingException;
 import io.druid.timeline.DataSegment;
 import org.jets3t.service.ServiceException;
 import org.jets3t.service.impl.rest.httpclient.RestS3Service;
 import org.jets3t.service.model.S3Object;

 import java.util.Map;

@@ -54,7 +53,7 @@ public class S3DataSegmentKiller implements DataSegmentKiller
     Map<String, Object> loadSpec = segment.getLoadSpec();
     String s3Bucket = MapUtils.getString(loadSpec, "bucket");
     String s3Path = MapUtils.getString(loadSpec, "key");
-    String s3DescriptorPath = s3Path.substring(0, s3Path.lastIndexOf("/")) + "/descriptor.json";
+    String s3DescriptorPath = S3Utils.descriptorPathForSegmentPath(s3Path);

     if (s3Client.isObjectInBucket(s3Bucket, s3Path)) {
       log.info("Removing index file[s3://%s/%s] from s3!", s3Bucket, s3Path);

@@ -56,7 +56,7 @@ public class S3DataSegmentMover implements DataSegmentMover
     Map<String, Object> loadSpec = segment.getLoadSpec();
     String s3Bucket = MapUtils.getString(loadSpec, "bucket");
     String s3Path = MapUtils.getString(loadSpec, "key");
-    String s3DescriptorPath = s3Path.substring(0, s3Path.lastIndexOf("/")) + "/descriptor.json";
+    String s3DescriptorPath = S3Utils.descriptorPathForSegmentPath(s3Path);

     final String s3ArchiveBucket = config.getArchiveBucket();

@@ -90,8 +90,11 @@ public class S3DataSegmentPusher implements DataSegmentPusher
       S3Object toPush = new S3Object(zipOutFile);

       final String outputBucket = config.getBucket();
+      final String s3Path = outputKey + "/index.zip";
+      final String s3DescriptorPath = S3Utils.descriptorPathForSegmentPath(s3Path);
+
       toPush.setBucketName(outputBucket);
-      toPush.setKey(outputKey + "/index.zip");
+      toPush.setKey(s3Path);
       if (!config.getDisableAcl()) {
         toPush.setAcl(AccessControlList.REST_CANNED_AUTHENTICATED_READ);
       }

@@ -116,7 +119,7 @@ public class S3DataSegmentPusher implements DataSegmentPusher
       Files.copy(ByteStreams.newInputStreamSupplier(jsonMapper.writeValueAsBytes(inSegment)), descriptorFile);
       S3Object descriptorObject = new S3Object(descriptorFile);
       descriptorObject.setBucketName(outputBucket);
-      descriptorObject.setKey(outputKey + "/descriptor.json");
+      descriptorObject.setKey(s3DescriptorPath);
       if (!config.getDisableAcl()) {
         descriptorObject.setAcl(GSAccessControlList.REST_CANNED_BUCKET_OWNER_FULL_CONTROL);
       }

@@ -142,4 +145,4 @@ public class S3DataSegmentPusher implements DataSegmentPusher
       throw Throwables.propagate(e);
     }
   }
 }

@@ -96,4 +96,8 @@ public class S3Utils
     return true;
   }

+  public static String descriptorPathForSegmentPath(String s3Path)
+  {
+    return s3Path.substring(0, s3Path.lastIndexOf("/")) + "/descriptor.json";
+  }
 }
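
A minimal JUnit 4 sketch of what the extracted helper is expected to return; the test class name and the segment key below are hypothetical and only illustrate the shape of a segment path ending in index.zip:

    import org.junit.Assert;
    import org.junit.Test;

    public class S3UtilsPathTest
    {
      @Test
      public void testDescriptorPathForSegmentPath()
      {
        // hypothetical segment key, used only for illustration
        final String segmentPath = "base/dataSource/2013-01-01T00:00:00.000Z_2013-01-02T00:00:00.000Z/v1/0/index.zip";

        // the helper strips the last path component and appends descriptor.json
        Assert.assertEquals(
            "base/dataSource/2013-01-01T00:00:00.000Z_2013-01-02T00:00:00.000Z/v1/0/descriptor.json",
            S3Utils.descriptorPathForSegmentPath(segmentPath)
        );
      }
    }

Note that the helper assumes the key contains at least one "/": for a bare key, lastIndexOf returns -1 and substring(0, -1) throws StringIndexOutOfBoundsException.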