From 2103906a482671dacf40fa8542fa625e9a08ce9b Mon Sep 17 00:00:00 2001
From: fjy
Date: Tue, 5 Jan 2016 16:42:05 -0800
Subject: [PATCH] add pusher tests for all deep storages

---
 extensions/azure-extensions/pom.xml            |  19 +-
 .../storage/azure/AzureAccountConfig.java      |  25 +++
 .../storage/azure/AzureDataSegmentPusher.java  |  36 ++--
 .../azure/AzureDataSegmentPusherTest.java      |  50 +++++-
 extensions/cloudfiles-extensions/pom.xml       | 162 +++++++++---------
 .../CloudFilesDataSegmentPusher.java           |  30 ++--
 .../CloudFilesDataSegmentPusherConfig.java     |  21 ++-
 .../CloudFilesDataSegmentPusherTest.java       |  93 ++++++++++
 .../hdfs/HdfsDataSegmentPusherConfig.java      |   7 +-
 .../hdfs/HdfsDataSegmentPusherTest.java        |  78 +++++++++
 .../s3/S3DataSegmentArchiverConfig.java        |   4 +-
 .../storage/s3/S3DataSegmentPusherConfig.java  |  21 ++-
 .../storage/s3/S3DataSegmentPusherTest.java    |  87 ++++++++++
 13 files changed, 511 insertions(+), 122 deletions(-)
 create mode 100644 extensions/cloudfiles-extensions/src/test/java/io/druid/storage/cloudfiles/CloudFilesDataSegmentPusherTest.java
 create mode 100644 extensions/hdfs-storage/src/test/java/io/druid/storage/hdfs/HdfsDataSegmentPusherTest.java
 create mode 100644 extensions/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentPusherTest.java

diff --git a/extensions/azure-extensions/pom.xml b/extensions/azure-extensions/pom.xml
index afe66809c9c..df00f7ee7d3 100644
--- a/extensions/azure-extensions/pom.xml
+++ b/extensions/azure-extensions/pom.xml
@@ -16,7 +16,8 @@
   ~ limitations under the License.
   -->
 
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
     <modelVersion>4.0.0</modelVersion>
     <groupId>io.druid.extensions</groupId>
     <artifactId>druid-azure-extensions</artifactId>
     <name>druid-azure-extensions</name>
     <description>druid-azure-extensions</description>
 
     <parent>
         <groupId>io.druid</groupId>
         <artifactId>druid</artifactId>
         <version>0.9.0-SNAPSHOT</version>
         <relativePath>../../pom.xml</relativePath>
     </parent>
@@ -36,11 +37,11 @@
             <artifactId>druid-api</artifactId>
         </dependency>
 
-    <dependency>
-        <groupId>com.microsoft.azure</groupId>
-        <artifactId>azure-storage</artifactId>
-        <version>2.1.0</version>
-    </dependency>
+        <dependency>
+            <groupId>com.microsoft.azure</groupId>
+            <artifactId>azure-storage</artifactId>
+            <version>2.1.0</version>
+        </dependency>
 
         <!-- Tests -->
@@ -48,6 +49,12 @@
             <artifactId>junit</artifactId>
            <scope>test</scope>
         </dependency>
+        <dependency>
+            <groupId>io.druid</groupId>
+            <artifactId>druid-server</artifactId>
+            <version>${project.parent.version}</version>
+            <scope>test</scope>
+        </dependency>
         <dependency>
             <groupId>org.easymock</groupId>
             <artifactId>easymock</artifactId>
diff --git a/extensions/azure-extensions/src/main/java/io/druid/storage/azure/AzureAccountConfig.java b/extensions/azure-extensions/src/main/java/io/druid/storage/azure/AzureAccountConfig.java
index e2dbf6c1c57..342f97c2af8 100644
--- a/extensions/azure-extensions/src/main/java/io/druid/storage/azure/AzureAccountConfig.java
+++ b/extensions/azure-extensions/src/main/java/io/druid/storage/azure/AzureAccountConfig.java
@@ -45,6 +45,31 @@ public class AzureAccountConfig
   @NotNull
   private String container;
 
+  public void setProtocol(String protocol)
+  {
+    this.protocol = protocol;
+  }
+
+  public void setMaxTries(int maxTries)
+  {
+    this.maxTries = maxTries;
+  }
+
+  public void setAccount(String account)
+  {
+    this.account = account;
+  }
+
+  public void setKey(String key)
+  {
+    this.key = key;
+  }
+
+  public void setContainer(String container)
+  {
+    this.container = container;
+  }
+
   public String getProtocol() { return protocol; }
 
   public int getMaxTries() { return maxTries; }
diff --git a/extensions/azure-extensions/src/main/java/io/druid/storage/azure/AzureDataSegmentPusher.java b/extensions/azure-extensions/src/main/java/io/druid/storage/azure/AzureDataSegmentPusher.java
index 9acd273644d..107779e9ad6 100644
--- a/extensions/azure-extensions/src/main/java/io/druid/storage/azure/AzureDataSegmentPusher.java
+++ b/extensions/azure-extensions/src/main/java/io/druid/storage/azure/AzureDataSegmentPusher.java
@@ -64,15 +64,6 @@ public class AzureDataSegmentPusher implements DataSegmentPusher
     return null;
   }
 
-  public File createCompressedSegmentDataFile(final File indexFilesDir) throws IOException
-  {
-    final File zipOutFile = File.createTempFile("index", ".zip");
-    CompressionUtils.zip(indexFilesDir,
zipOutFile); - - return zipOutFile; - - } - public File createSegmentDescriptorFile(final ObjectMapper jsonMapper, final DataSegment segment) throws IOException { @@ -98,6 +89,7 @@ public class AzureDataSegmentPusher implements DataSegmentPusher public DataSegment uploadDataSegment( DataSegment segment, final int version, + final long size, final File compressedSegmentData, final File descriptorFile, final Map azurePaths @@ -108,7 +100,7 @@ public class AzureDataSegmentPusher implements DataSegmentPusher azureStorage.uploadBlob(descriptorFile, config.getContainer(), azurePaths.get("descriptor")); final DataSegment outSegment = segment - .withSize(compressedSegmentData.length()) + .withSize(size) .withLoadSpec( ImmutableMap.of( "type", @@ -137,18 +129,23 @@ public class AzureDataSegmentPusher implements DataSegmentPusher log.info("Uploading [%s] to Azure.", indexFilesDir); final int version = SegmentUtils.getVersionFromDir(indexFilesDir); - final File compressedSegmentData = createCompressedSegmentDataFile(indexFilesDir); - final File descriptorFile = createSegmentDescriptorFile(jsonMapper, segment); - final Map azurePaths = getAzurePaths(segment); + File zipOutFile = null; + File descriptorFile = null; try { + final File outFile = zipOutFile = File.createTempFile("index", ".zip"); + final long size = CompressionUtils.zip(indexFilesDir, zipOutFile); + + final File descFile = descriptorFile = createSegmentDescriptorFile(jsonMapper, segment); + final Map azurePaths = getAzurePaths(segment); + return AzureUtils.retryAzureOperation( new Callable() { @Override public DataSegment call() throws Exception { - return uploadDataSegment(segment, version, compressedSegmentData, descriptorFile, azurePaths); + return uploadDataSegment(segment, version, size, outFile, descFile, azurePaths); } }, config.getMaxTries() @@ -157,5 +154,16 @@ public class AzureDataSegmentPusher implements DataSegmentPusher catch (Exception e) { throw Throwables.propagate(e); } + finally { + if (zipOutFile != null) { + log.info("Deleting zipped index File[%s]", zipOutFile); + zipOutFile.delete(); + } + + if (descriptorFile != null) { + log.info("Deleting descriptor file[%s]", descriptorFile); + descriptorFile.delete(); + } + } } } diff --git a/extensions/azure-extensions/src/test/java/io/druid/storage/azure/AzureDataSegmentPusherTest.java b/extensions/azure-extensions/src/test/java/io/druid/storage/azure/AzureDataSegmentPusherTest.java index a5ea1b9e283..60bf5bb8786 100644 --- a/extensions/azure-extensions/src/test/java/io/druid/storage/azure/AzureDataSegmentPusherTest.java +++ b/extensions/azure-extensions/src/test/java/io/druid/storage/azure/AzureDataSegmentPusherTest.java @@ -21,27 +21,36 @@ package io.druid.storage.azure; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Lists; +import com.google.common.collect.Maps; +import com.google.common.io.Files; import com.metamx.common.MapUtils; import com.microsoft.azure.storage.StorageException; +import io.druid.jackson.DefaultObjectMapper; import io.druid.segment.loading.DataSegmentPusherUtil; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; import org.easymock.EasyMockSupport; import org.joda.time.Interval; +import org.junit.Assert; import org.junit.Before; +import org.junit.Rule; import org.junit.Test; +import org.junit.rules.TemporaryFolder; import java.io.File; import java.io.IOException; import java.net.URISyntaxException; import java.util.Map; -import 
static org.easymock.EasyMock.expect; import static org.easymock.EasyMock.expectLastCall; import static org.junit.Assert.assertEquals; public class AzureDataSegmentPusherTest extends EasyMockSupport { + @Rule + public final TemporaryFolder tempFolder = new TemporaryFolder(); + private static final String containerName = "container"; private static final String blobPath = "test/2015-04-12T00:00:00.000Z_2015-04-13T00:00:00.000Z/1/0/index.zip"; private static final DataSegment dataSegment = new DataSegment( @@ -64,9 +73,40 @@ public class AzureDataSegmentPusherTest extends EasyMockSupport public void before() { azureStorage = createMock(AzureStorage.class); - azureAccountConfig = createMock(AzureAccountConfig.class); - jsonMapper = createMock(ObjectMapper.class); + azureAccountConfig = new AzureAccountConfig(); + azureAccountConfig.setAccount("account"); + azureAccountConfig.setContainer("container"); + jsonMapper = new DefaultObjectMapper(); + } + + @Test + public void testPush() throws Exception + { + AzureDataSegmentPusher pusher = new AzureDataSegmentPusher(azureStorage, azureAccountConfig, jsonMapper); + + // Create a mock segment on disk + File tmp = tempFolder.newFile("version.bin"); + + final byte[] data = new byte[]{0x0, 0x0, 0x0, 0x1}; + Files.write(data, tmp); + final long size = data.length; + + DataSegment segmentToPush = new DataSegment( + "foo", + new Interval("2015/2016"), + "0", + Maps.newHashMap(), + Lists.newArrayList(), + Lists.newArrayList(), + new NoneShardSpec(), + 0, + size + ); + + DataSegment segment = pusher.push(tempFolder.getRoot(), segmentToPush); + + Assert.assertEquals(segmentToPush.getSize(), segment.getSize()); } @Test @@ -93,7 +133,6 @@ public class AzureDataSegmentPusherTest extends EasyMockSupport final File descriptorFile = new File("descriptor.json"); final Map azurePaths = pusher.getAzurePaths(dataSegment); - expect(azureAccountConfig.getContainer()).andReturn(containerName).times(3); azureStorage.uploadBlob(compressedSegmentData, containerName, azurePaths.get("index")); expectLastCall(); azureStorage.uploadBlob(descriptorFile, containerName, azurePaths.get("descriptor")); @@ -104,6 +143,7 @@ public class AzureDataSegmentPusherTest extends EasyMockSupport DataSegment pushedDataSegment = pusher.uploadDataSegment( dataSegment, version, + 0, // empty file compressedSegmentData, descriptorFile, azurePaths @@ -116,7 +156,5 @@ public class AzureDataSegmentPusherTest extends EasyMockSupport assertEquals(azurePaths.get("index"), MapUtils.getString(loadSpec, "blobPath")); verifyAll(); - } - } diff --git a/extensions/cloudfiles-extensions/pom.xml b/extensions/cloudfiles-extensions/pom.xml index 7358d4b60b5..0766b5765d9 100644 --- a/extensions/cloudfiles-extensions/pom.xml +++ b/extensions/cloudfiles-extensions/pom.xml @@ -20,87 +20,93 @@ --> - 4.0.0 - io.druid.extensions - druid-cloudfiles-extensions - druid-cloudfiles-extensions - druid-cloudfiles-extensions + xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd" + xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"> + 4.0.0 + io.druid.extensions + druid-cloudfiles-extensions + druid-cloudfiles-extensions + druid-cloudfiles-extensions - - io.druid - druid - 0.9.0-SNAPSHOT - ../../pom.xml - + + io.druid + druid + 0.9.0-SNAPSHOT + ../../pom.xml + - - UTF-8 - 1.9.1 - - 3.0 - + + UTF-8 + 1.9.1 + + 3.0 + - - - io.druid - druid-api - - - com.google.inject - guice - ${guice.version} - - - - com.google.inject.extensions - 
guice-servlet - ${guice.version} - - - - com.google.inject.extensions - guice-multibindings - ${guice.version} - - - - - org.apache.jclouds.driver - jclouds-slf4j - ${jclouds.version} - - - org.apache.jclouds.driver - jclouds-sshj - ${jclouds.version} - - - - org.apache.jclouds.provider - rackspace-cloudfiles-us - ${jclouds.version} - - - - org.apache.jclouds.provider - rackspace-cloudfiles-uk - ${jclouds.version} - + + + io.druid + druid-api + + + com.google.inject + guice + ${guice.version} + + + + com.google.inject.extensions + guice-servlet + ${guice.version} + + + + com.google.inject.extensions + guice-multibindings + ${guice.version} + + + + + org.apache.jclouds.driver + jclouds-slf4j + ${jclouds.version} + + + org.apache.jclouds.driver + jclouds-sshj + ${jclouds.version} + + + + org.apache.jclouds.provider + rackspace-cloudfiles-us + ${jclouds.version} + + + + org.apache.jclouds.provider + rackspace-cloudfiles-uk + ${jclouds.version} + - - - junit - junit - test - - - org.easymock - easymock - test - - + + + junit + junit + test + + + org.easymock + easymock + test + + + io.druid + druid-server + ${project.parent.version} + test + + diff --git a/extensions/cloudfiles-extensions/src/main/java/io/druid/storage/cloudfiles/CloudFilesDataSegmentPusher.java b/extensions/cloudfiles-extensions/src/main/java/io/druid/storage/cloudfiles/CloudFilesDataSegmentPusher.java index b2dca8f698b..fa34d3ac53d 100644 --- a/extensions/cloudfiles-extensions/src/main/java/io/druid/storage/cloudfiles/CloudFilesDataSegmentPusher.java +++ b/extensions/cloudfiles-extensions/src/main/java/io/druid/storage/cloudfiles/CloudFilesDataSegmentPusher.java @@ -68,13 +68,17 @@ public class CloudFilesDataSegmentPusher implements DataSegmentPusher public DataSegment push(final File indexFilesDir, final DataSegment inSegment) throws IOException { final String segmentPath = CloudFilesUtils.buildCloudFilesPath(this.config.getBasePath(), inSegment); - final File descriptorFile = File.createTempFile("descriptor", ".json"); - final File zipOutFile = File.createTempFile("druid", "index.zip"); - final long indexSize = CompressionUtils.zip(indexFilesDir, zipOutFile); - log.info("Copying segment[%s] to CloudFiles at location[%s]", inSegment.getIdentifier(), segmentPath); + File descriptorFile = null; + File zipOutFile = null; try { + final File descFile = descriptorFile = File.createTempFile("descriptor", ".json"); + final File outFile = zipOutFile = File.createTempFile("druid", "index.zip"); + + final long indexSize = CompressionUtils.zip(indexFilesDir, zipOutFile); + + log.info("Copying segment[%s] to CloudFiles at location[%s]", inSegment.getIdentifier(), segmentPath); return CloudFilesUtils.retryCloudFilesOperation( new Callable() { @@ -82,17 +86,17 @@ public class CloudFilesDataSegmentPusher implements DataSegmentPusher public DataSegment call() throws Exception { CloudFilesObject segmentData = new CloudFilesObject( - segmentPath, zipOutFile, objectApi.getRegion(), + segmentPath, outFile, objectApi.getRegion(), objectApi.getContainer() ); log.info("Pushing %s.", segmentData.getPath()); objectApi.put(segmentData); - try (FileOutputStream stream = new FileOutputStream(descriptorFile)) { + try (FileOutputStream stream = new FileOutputStream(descFile)) { stream.write(jsonMapper.writeValueAsBytes(inSegment)); } CloudFilesObject descriptorData = new CloudFilesObject( - segmentPath, descriptorFile, + segmentPath, descFile, objectApi.getRegion(), objectApi.getContainer() ); log.info("Pushing %s.", descriptorData.getPath()); @@ -123,11 
+127,15 @@ public class CloudFilesDataSegmentPusher implements DataSegmentPusher throw Throwables.propagate(e); } finally { - log.info("Deleting zipped index File[%s]", zipOutFile); - zipOutFile.delete(); + if (zipOutFile != null) { + log.info("Deleting zipped index File[%s]", zipOutFile); + zipOutFile.delete(); + } - log.info("Deleting descriptor file[%s]", descriptorFile); - descriptorFile.delete(); + if (descriptorFile != null) { + log.info("Deleting descriptor file[%s]", descriptorFile); + descriptorFile.delete(); + } } } } diff --git a/extensions/cloudfiles-extensions/src/main/java/io/druid/storage/cloudfiles/CloudFilesDataSegmentPusherConfig.java b/extensions/cloudfiles-extensions/src/main/java/io/druid/storage/cloudfiles/CloudFilesDataSegmentPusherConfig.java index e18475f273a..ea02b086445 100644 --- a/extensions/cloudfiles-extensions/src/main/java/io/druid/storage/cloudfiles/CloudFilesDataSegmentPusherConfig.java +++ b/extensions/cloudfiles-extensions/src/main/java/io/druid/storage/cloudfiles/CloudFilesDataSegmentPusherConfig.java @@ -28,7 +28,6 @@ import javax.validation.constraints.NotNull; */ public class CloudFilesDataSegmentPusherConfig { - @JsonProperty @NotNull private String region; @@ -44,6 +43,26 @@ public class CloudFilesDataSegmentPusherConfig @JsonProperty private int operationMaxRetries = 10; + public void setRegion(String region) + { + this.region = region; + } + + public void setContainer(String container) + { + this.container = container; + } + + public void setBasePath(String basePath) + { + this.basePath = basePath; + } + + public void setOperationMaxRetries(int operationMaxRetries) + { + this.operationMaxRetries = operationMaxRetries; + } + public String getRegion() { Preconditions.checkNotNull(region); diff --git a/extensions/cloudfiles-extensions/src/test/java/io/druid/storage/cloudfiles/CloudFilesDataSegmentPusherTest.java b/extensions/cloudfiles-extensions/src/test/java/io/druid/storage/cloudfiles/CloudFilesDataSegmentPusherTest.java new file mode 100644 index 00000000000..2ec2c22390c --- /dev/null +++ b/extensions/cloudfiles-extensions/src/test/java/io/druid/storage/cloudfiles/CloudFilesDataSegmentPusherTest.java @@ -0,0 +1,93 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package io.druid.storage.cloudfiles; + +import com.google.common.collect.Lists; +import com.google.common.collect.Maps; +import com.google.common.io.Files; +import io.druid.jackson.DefaultObjectMapper; +import io.druid.timeline.DataSegment; +import io.druid.timeline.partition.NoneShardSpec; +import org.easymock.EasyMock; +import org.jclouds.io.Payload; +import org.jclouds.openstack.swift.v1.features.ObjectApi; +import org.jclouds.rackspace.cloudfiles.v1.CloudFilesApi; +import org.joda.time.Interval; +import org.junit.Assert; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.TemporaryFolder; + +import java.io.File; + +/** + */ +public class CloudFilesDataSegmentPusherTest +{ + @Rule + public final TemporaryFolder tempFolder = new TemporaryFolder(); + + @Test + public void testPush() throws Exception + { + ObjectApi objectApi = EasyMock.createStrictMock(ObjectApi.class); + EasyMock.expect(objectApi.put(EasyMock.anyString(), EasyMock.anyObject())).andReturn(null).atLeastOnce(); + EasyMock.replay(objectApi); + + CloudFilesApi api = EasyMock.createStrictMock(CloudFilesApi.class); + EasyMock.expect(api.getObjectApi(EasyMock.anyString(), EasyMock.anyString())) + .andReturn(objectApi) + .atLeastOnce(); + EasyMock.replay(api); + + + CloudFilesDataSegmentPusherConfig config = new CloudFilesDataSegmentPusherConfig(); + config.setRegion("region"); + config.setContainer("container"); + config.setBasePath("basePath"); + + CloudFilesDataSegmentPusher pusher = new CloudFilesDataSegmentPusher(api, config, new DefaultObjectMapper()); + + // Create a mock segment on disk + File tmp = tempFolder.newFile("version.bin"); + + final byte[] data = new byte[]{0x0, 0x0, 0x0, 0x1}; + Files.write(data, tmp); + final long size = data.length; + + DataSegment segmentToPush = new DataSegment( + "foo", + new Interval("2015/2016"), + "0", + Maps.newHashMap(), + Lists.newArrayList(), + Lists.newArrayList(), + new NoneShardSpec(), + 0, + size + ); + + DataSegment segment = pusher.push(tempFolder.getRoot(), segmentToPush); + + Assert.assertEquals(segmentToPush.getSize(), segment.getSize()); + + EasyMock.verify(api); + } +} diff --git a/extensions/hdfs-storage/src/main/java/io/druid/storage/hdfs/HdfsDataSegmentPusherConfig.java b/extensions/hdfs-storage/src/main/java/io/druid/storage/hdfs/HdfsDataSegmentPusherConfig.java index 21050d68a6f..6fed079bde7 100644 --- a/extensions/hdfs-storage/src/main/java/io/druid/storage/hdfs/HdfsDataSegmentPusherConfig.java +++ b/extensions/hdfs-storage/src/main/java/io/druid/storage/hdfs/HdfsDataSegmentPusherConfig.java @@ -26,7 +26,12 @@ import com.fasterxml.jackson.annotation.JsonProperty; public class HdfsDataSegmentPusherConfig { @JsonProperty - public String storageDirectory = ""; + private String storageDirectory = ""; + + public void setStorageDirectory(String storageDirectory) + { + this.storageDirectory = storageDirectory; + } public String getStorageDirectory() { diff --git a/extensions/hdfs-storage/src/test/java/io/druid/storage/hdfs/HdfsDataSegmentPusherTest.java b/extensions/hdfs-storage/src/test/java/io/druid/storage/hdfs/HdfsDataSegmentPusherTest.java new file mode 100644 index 00000000000..7bcf4db16f4 --- /dev/null +++ b/extensions/hdfs-storage/src/test/java/io/druid/storage/hdfs/HdfsDataSegmentPusherTest.java @@ -0,0 +1,78 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.storage.hdfs; + +import com.google.api.client.util.Lists; +import com.google.api.client.util.Maps; +import com.google.common.io.Files; +import io.druid.jackson.DefaultObjectMapper; +import io.druid.timeline.DataSegment; +import io.druid.timeline.partition.NoneShardSpec; +import org.apache.hadoop.conf.Configuration; +import org.joda.time.Interval; +import org.junit.Assert; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.TemporaryFolder; + +import java.io.File; + +/** + */ +public class HdfsDataSegmentPusherTest +{ + @Rule + public final TemporaryFolder tempFolder = new TemporaryFolder(); + + @Test + public void testPush() throws Exception + { + Configuration conf = new Configuration(true); + + // Create a mock segment on disk + File segmentDir = tempFolder.newFolder(); + File tmp = new File(segmentDir, "version.bin"); + + final byte[] data = new byte[]{0x0, 0x0, 0x0, 0x1}; + Files.write(data, tmp); + final long size = data.length; + + HdfsDataSegmentPusherConfig config = new HdfsDataSegmentPusherConfig(); + + config.setStorageDirectory(tempFolder.newFolder().getAbsolutePath()); + HdfsDataSegmentPusher pusher = new HdfsDataSegmentPusher(config, conf, new DefaultObjectMapper()); + + DataSegment segmentToPush = new DataSegment( + "foo", + new Interval("2015/2016"), + "0", + Maps.newHashMap(), + Lists.newArrayList(), + Lists.newArrayList(), + new NoneShardSpec(), + 0, + size + ); + + DataSegment segment = pusher.push(segmentDir, segmentToPush); + + Assert.assertEquals(segmentToPush.getSize(), segment.getSize()); + } +} diff --git a/extensions/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentArchiverConfig.java b/extensions/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentArchiverConfig.java index 1525e1f3385..960b6886756 100644 --- a/extensions/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentArchiverConfig.java +++ b/extensions/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentArchiverConfig.java @@ -24,10 +24,10 @@ import com.fasterxml.jackson.annotation.JsonProperty; public class S3DataSegmentArchiverConfig { @JsonProperty - public String archiveBucket = ""; + private String archiveBucket = ""; @JsonProperty - public String archiveBaseKey = ""; + private String archiveBaseKey = ""; public String getArchiveBucket() { diff --git a/extensions/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentPusherConfig.java b/extensions/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentPusherConfig.java index a611b3a1622..fc4f384ee2b 100644 --- a/extensions/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentPusherConfig.java +++ b/extensions/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentPusherConfig.java @@ -26,13 +26,28 @@ import com.fasterxml.jackson.annotation.JsonProperty; public class S3DataSegmentPusherConfig { @JsonProperty - public String bucket = ""; + private String bucket = 
""; @JsonProperty - public String baseKey = ""; + private String baseKey = ""; @JsonProperty - public boolean disableAcl = false; + private boolean disableAcl = false; + + public void setBucket(String bucket) + { + this.bucket = bucket; + } + + public void setBaseKey(String baseKey) + { + this.baseKey = baseKey; + } + + public void setDisableAcl(boolean disableAcl) + { + this.disableAcl = disableAcl; + } public String getBucket() { diff --git a/extensions/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentPusherTest.java b/extensions/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentPusherTest.java new file mode 100644 index 00000000000..62de4a62240 --- /dev/null +++ b/extensions/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentPusherTest.java @@ -0,0 +1,87 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.storage.s3; + +import com.google.api.client.util.Lists; +import com.google.api.client.util.Maps; +import com.google.common.io.Files; +import io.druid.jackson.DefaultObjectMapper; +import io.druid.timeline.DataSegment; +import io.druid.timeline.partition.NoneShardSpec; +import org.easymock.EasyMock; +import org.jets3t.service.impl.rest.httpclient.RestS3Service; +import org.jets3t.service.model.S3Object; +import org.joda.time.Interval; +import org.junit.Assert; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.TemporaryFolder; + +import java.io.File; + +/** + */ +public class S3DataSegmentPusherTest +{ + @Rule + public final TemporaryFolder tempFolder = new TemporaryFolder(); + + @Test + public void testPush() throws Exception + { + RestS3Service s3Client = EasyMock.createStrictMock(RestS3Service.class); + EasyMock.expect(s3Client.putObject(EasyMock.anyString(), EasyMock.anyObject())) + .andReturn(null) + .atLeastOnce(); + EasyMock.replay(s3Client); + + + S3DataSegmentPusherConfig config = new S3DataSegmentPusherConfig(); + config.setBucket("bucket"); + config.setBaseKey("key"); + + S3DataSegmentPusher pusher = new S3DataSegmentPusher(s3Client, config, new DefaultObjectMapper()); + + // Create a mock segment on disk + File tmp = tempFolder.newFile("version.bin"); + + final byte[] data = new byte[]{0x0, 0x0, 0x0, 0x1}; + Files.write(data, tmp); + final long size = data.length; + + DataSegment segmentToPush = new DataSegment( + "foo", + new Interval("2015/2016"), + "0", + Maps.newHashMap(), + Lists.newArrayList(), + Lists.newArrayList(), + new NoneShardSpec(), + 0, + size + ); + + DataSegment segment = pusher.push(tempFolder.getRoot(), segmentToPush); + + Assert.assertEquals(segmentToPush.getSize(), segment.getSize()); + + EasyMock.verify(s3Client); + } +}