mirror of https://github.com/apache/druid.git
Merge pull request #336 from metamx/baseKey-casing
config casing consistency
Commit: e1b4652100
@@ -285,10 +285,10 @@ This deep storage is used to interface with Amazon's S3.

 |Property|Description|Default|
 |--------|-----------|-------|
 |`druid.storage.bucket`|S3 bucket name.|none|
-|`druid.storage.basekey`|S3 object key prefix for storage.|none|
+|`druid.storage.baseKey`|S3 object key prefix for storage.|none|
 |`druid.storage.disableAcl`|Boolean flag for ACL.|false|
 |`druid.storage.archiveBucket`|S3 bucket name for archiving when running the indexing-service *archive task*.|none|
-|`druid.storage.archiveBasekey`|S3 object key prefix for archiving.|none|
+|`druid.storage.archiveBaseKey`|S3 object key prefix for archiving.|none|

 #### HDFS Deep Storage
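For context on the two properties being renamed, here is a rough illustration of how a bucket and a `baseKey` prefix combine into a full S3 object location. This is a standalone sketch with made-up values and a made-up key layout, not code from Druid:

```java
// Illustrative sketch only: composes an S3 object location from a configured
// bucket and baseKey prefix. Values and layout are hypothetical.
public class S3KeySketch
{
  public static void main(String[] args)
  {
    String bucket = "my-druid-bucket";       // hypothetical value for druid.storage.bucket
    String baseKey = "druid/segments";       // hypothetical value for druid.storage.baseKey
    String segmentPath = "wikipedia/2013-01-01T00/0/index.zip";  // hypothetical segment path

    String objectKey = baseKey + "/" + segmentPath;
    System.out.println("s3://" + bucket + "/" + objectKey);
    // -> s3://my-druid-bucket/druid/segments/wikipedia/2013-01-01T00/0/index.zip
  }
}
```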
@@ -45,7 +45,7 @@ public class S3DataSegmentArchiver extends S3DataSegmentMover implements DataSeg
   public DataSegment archive(DataSegment segment) throws SegmentLoadingException
   {
     String targetS3Bucket = config.getArchiveBucket();
-    String targetS3BaseKey = config.getArchiveBasekey();
+    String targetS3BaseKey = config.getArchiveBaseKey();

     return move(
         segment,
@@ -27,15 +27,15 @@ public class S3DataSegmentArchiverConfig
   public String archiveBucket = "";

   @JsonProperty
-  public String archiveBasekey = "";
+  public String archiveBaseKey = "";

   public String getArchiveBucket()
   {
     return archiveBucket;
   }

-  public String getArchiveBasekey()
+  public String getArchiveBaseKey()
   {
-    return archiveBasekey;
+    return archiveBaseKey;
   }
 }
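The casing fix matters because Jackson's `@JsonProperty`, when given no explicit name, binds the configuration key to the field name, so the key must be spelled `archiveBaseKey` to populate the field. The minimal sketch below (a standalone class with hypothetical values, not the actual Druid config class) shows the difference:

```java
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;

// Minimal sketch of why the field casing matters: Jackson matches the key
// against the field name declared via @JsonProperty, so "archiveBasekey"
// and "archiveBaseKey" are two different keys.
public class ArchiverConfigSketch
{
  @JsonProperty
  public String archiveBucket = "";

  @JsonProperty
  public String archiveBaseKey = "";

  public static void main(String[] args) throws Exception
  {
    ObjectMapper mapper = new ObjectMapper()
        .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);

    // The mis-cased key is ignored, leaving archiveBaseKey at its default value.
    ArchiverConfigSketch wrongCase = mapper.readValue(
        "{\"archiveBucket\":\"my-archive\",\"archiveBasekey\":\"druid/archive\"}",
        ArchiverConfigSketch.class);
    System.out.println(wrongCase.archiveBaseKey);  // prints an empty string

    // The correctly cased key binds as expected.
    ArchiverConfigSketch rightCase = mapper.readValue(
        "{\"archiveBucket\":\"my-archive\",\"archiveBaseKey\":\"druid/archive\"}",
        ArchiverConfigSketch.class);
    System.out.println(rightCase.archiveBaseKey);  // prints "druid/archive"
  }
}
```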