mirror of https://github.com/apache/druid.git (synced 2025-03-06 17:29:45 +00:00)

separate segment mover and segment archiver

commit 6b90372002 (parent 123bddd615)
pom.xml
@@ -41,7 +41,7 @@
         <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
         <metamx.java-util.version>0.25.1</metamx.java-util.version>
         <apache.curator.version>2.1.0-incubating</apache.curator.version>
-        <druid.api.version>0.1.6</druid.api.version>
+        <druid.api.version>0.1.7</druid.api.version>
     </properties>
 
     <modules>
S3DataSegmentArchiver.java (new file)
@@ -0,0 +1,59 @@
+/*
+ * Druid - a distributed column store.
+ * Copyright (C) 2012, 2013 Metamarkets Group Inc.
+ *
+ * This program is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU General Public License
+ * as published by the Free Software Foundation; either version 2
+ * of the License, or (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+ */
+
+package io.druid.storage.s3;
+
+import com.google.common.collect.ImmutableMap;
+import com.google.inject.Inject;
+import com.metamx.common.MapUtils;
+import io.druid.segment.loading.DataSegmentArchiver;
+import io.druid.segment.loading.SegmentLoadingException;
+import io.druid.timeline.DataSegment;
+import org.jets3t.service.impl.rest.httpclient.RestS3Service;
+
+
+public class S3DataSegmentArchiver extends S3DataSegmentMover implements DataSegmentArchiver
+{
+  private final S3DataSegmentArchiverConfig config;
+
+  @Inject
+  public S3DataSegmentArchiver(
+      RestS3Service s3Client,
+      S3DataSegmentArchiverConfig config
+  )
+  {
+    super(s3Client);
+    this.config = config;
+  }
+
+  @Override
+  public DataSegment archive(DataSegment segment) throws SegmentLoadingException
+  {
+    String targetS3Bucket = config.getArchiveBucket();
+    String targetS3Path = MapUtils.getString(segment.getLoadSpec(), "key");
+
+    return move(
+        segment,
+        ImmutableMap.<String, Object>of(
+            "bucket", targetS3Bucket,
+            "key", targetS3Path
+        )
+    );
+  }
+}
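Neither io.druid.segment.loading interface appears in the diff itself. From the @Override signatures here and in S3DataSegmentMover below, the split this commit introduces presumably looks roughly like the following; this is a sketch implied by the patch, not the verbatim druid-api source:

// Sketch of the two interfaces implied by the @Override signatures in this
// commit; the actual io.druid.segment.loading sources are not in the diff.
package io.druid.segment.loading;

import io.druid.timeline.DataSegment;

import java.util.Map;

// A mover is pointed at an explicit target location on every call.
interface DataSegmentMover
{
  DataSegment move(DataSegment segment, Map<String, Object> targetLoadSpec)
      throws SegmentLoadingException;
}

// An archiver carries its target location in injected configuration.
interface DataSegmentArchiver
{
  DataSegment archive(DataSegment segment) throws SegmentLoadingException;
}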
S3DataSegmentMoverConfig.java renamed to S3DataSegmentArchiverConfig.java
@@ -21,7 +21,7 @@ package io.druid.storage.s3;
 
 import com.fasterxml.jackson.annotation.JsonProperty;
 
-public class S3DataSegmentMoverConfig
+public class S3DataSegmentArchiverConfig
 {
   @JsonProperty
   public String archiveBucket = "";
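The hunk shows only the class rename; the getArchiveBucket() accessor that S3DataSegmentArchiver calls falls outside the hunk's context lines. Filled in from those two facts, the renamed class presumably reads roughly like this (license header omitted):

package io.druid.storage.s3;

import com.fasterxml.jackson.annotation.JsonProperty;

public class S3DataSegmentArchiverConfig
{
  // Populated by JsonConfigProvider from the druid.storage.* properties.
  @JsonProperty
  public String archiveBucket = "";

  public String getArchiveBucket()
  {
    return archiveBucket;
  }
}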
S3DataSegmentMover.java
@@ -37,20 +37,17 @@ public class S3DataSegmentMover implements DataSegmentMover
   private static final Logger log = new Logger(S3DataSegmentMover.class);
 
   private final RestS3Service s3Client;
-  private final S3DataSegmentMoverConfig config;
 
   @Inject
   public S3DataSegmentMover(
-      RestS3Service s3Client,
-      S3DataSegmentMoverConfig config
+      RestS3Service s3Client
   )
   {
     this.s3Client = s3Client;
-    this.config = config;
   }
 
   @Override
-  public DataSegment move(DataSegment segment) throws SegmentLoadingException
+  public DataSegment move(DataSegment segment, Map<String, Object> targetLoadSpec) throws SegmentLoadingException
   {
     try {
       Map<String, Object> loadSpec = segment.getLoadSpec();
@@ -58,10 +55,15 @@ public class S3DataSegmentMover implements DataSegmentMover
       String s3Path = MapUtils.getString(loadSpec, "key");
       String s3DescriptorPath = S3Utils.descriptorPathForSegmentPath(s3Path);
 
-      final String s3ArchiveBucket = config.getArchiveBucket();
+      final String targetS3Bucket = MapUtils.getString(targetLoadSpec, "bucket");
+      final String targetS3Path = MapUtils.getString(targetLoadSpec, "key");
+      String targetS3DescriptorPath = S3Utils.descriptorPathForSegmentPath(targetS3Path);
 
-      if (s3ArchiveBucket.isEmpty()) {
-        throw new SegmentLoadingException("S3 archive bucket not specified");
+      if (targetS3Bucket.isEmpty()) {
+        throw new SegmentLoadingException("Target S3 bucket is not specified");
+      }
+      if (targetS3Path.isEmpty()) {
+        throw new SegmentLoadingException("Target S3 path is not specified");
       }
 
       if (s3Client.isObjectInBucket(s3Bucket, s3Path)) {
@@ -69,26 +71,28 @@ public class S3DataSegmentMover implements DataSegmentMover
             "Moving index file[s3://%s/%s] to [s3://%s/%s]",
             s3Bucket,
             s3Path,
-            s3ArchiveBucket,
-            s3Path
+            targetS3Bucket,
+            targetS3Path
         );
-        s3Client.moveObject(s3Bucket, s3Path, s3ArchiveBucket, new S3Object(s3Path), false);
+        s3Client.moveObject(s3Bucket, s3Path, targetS3Bucket, new S3Object(targetS3Path), false);
       }
       if (s3Client.isObjectInBucket(s3Bucket, s3DescriptorPath)) {
         log.info(
             "Moving descriptor file[s3://%s/%s] to [s3://%s/%s]",
             s3Bucket,
             s3DescriptorPath,
-            s3ArchiveBucket,
-            s3DescriptorPath
+            targetS3Bucket,
+            targetS3DescriptorPath
         );
-        s3Client.moveObject(s3Bucket, s3DescriptorPath, s3ArchiveBucket, new S3Object(s3DescriptorPath), false);
+        s3Client.moveObject(s3Bucket, s3DescriptorPath, targetS3Bucket, new S3Object(targetS3DescriptorPath), false);
       }
 
       return segment.withLoadSpec(
           ImmutableMap.<String, Object>builder()
               .putAll(loadSpec)
-              .put("bucket", s3ArchiveBucket).build()
+              .put("bucket", targetS3Bucket)
+              .put("key", targetS3Path)
+              .build()
       );
     }
     catch (ServiceException e) {
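One subtlety in the last hunk: Guava's ImmutableMap.Builder rejects duplicate keys at build(), so the putAll(loadSpec).put("bucket", ...) chain assumes loadSpec does not already carry "bucket" and "key" entries. The intended replace semantics of the rewrite are sketched below with a plain HashMap; all bucket names and keys here are hypothetical, not taken from the patch:

import java.util.HashMap;
import java.util.Map;

public class LoadSpecRewriteExample
{
  public static void main(String[] args)
  {
    // Hypothetical load spec of a segment currently stored in S3.
    Map<String, Object> loadSpec = new HashMap<>();
    loadSpec.put("type", "s3_zip");
    loadSpec.put("bucket", "example-live-bucket");
    loadSpec.put("key", "datasource/2013-01-01/v1/0/index.zip");

    // The target location a caller now passes to move(); before this
    // commit the target bucket came from S3DataSegmentMoverConfig instead.
    Map<String, Object> targetLoadSpec = new HashMap<>();
    targetLoadSpec.put("bucket", "example-archive-bucket");
    targetLoadSpec.put("key", "datasource/2013-01-01/v1/0/index.zip");

    // move() keeps every other entry and overrides only "bucket" and "key".
    Map<String, Object> rewritten = new HashMap<>(loadSpec);
    rewritten.put("bucket", targetLoadSpec.get("bucket"));
    rewritten.put("key", targetLoadSpec.get("key"));

    // Prints a spec that still says type=s3_zip but points at the target.
    System.out.println(rewritten);
  }
}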
S3StorageDruidModule.java
@@ -52,9 +52,10 @@ public class S3StorageDruidModule implements DruidModule
     Binders.dataSegmentPullerBinder(binder).addBinding("s3_zip").to(S3DataSegmentPuller.class).in(LazySingleton.class);
     Binders.dataSegmentKillerBinder(binder).addBinding("s3_zip").to(S3DataSegmentKiller.class).in(LazySingleton.class);
     Binders.dataSegmentMoverBinder(binder).addBinding("s3_zip").to(S3DataSegmentMover.class).in(LazySingleton.class);
+    Binders.dataSegmentArchiverBinder(binder).addBinding("s3_zip").to(S3DataSegmentArchiver.class).in(LazySingleton.class);
     Binders.dataSegmentPusherBinder(binder).addBinding("s3").to(S3DataSegmentPusher.class).in(LazySingleton.class);
     JsonConfigProvider.bind(binder, "druid.storage", S3DataSegmentPusherConfig.class);
-    JsonConfigProvider.bind(binder, "druid.storage", S3DataSegmentMoverConfig.class);
+    JsonConfigProvider.bind(binder, "druid.storage", S3DataSegmentArchiverConfig.class);
 
     Binders.taskLogsBinder(binder).addBinding("s3").to(S3TaskLogs.class);
     JsonConfigProvider.bind(binder, "druid.indexer.logs", S3TaskLogsConfig.class);
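Because S3DataSegmentArchiverConfig is bound under the druid.storage prefix via JsonConfigProvider, the archive bucket would presumably be supplied through a runtime property along these lines (the bucket name is hypothetical):

druid.storage.archiveBucket=example-archive-bucket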