Merge branch 'master' of github.com:metamx/druid

commit e7112dbb38
Author: fjy
Date:   2013-12-17 16:22:47 -08:00

6 changed files with 47 additions and 11 deletions


@@ -285,9 +285,10 @@ This deep storage is used to interface with Amazon's S3.
 
 |Property|Description|Default|
 |--------|-----------|-------|
 |`druid.storage.bucket`|S3 bucket name.|none|
-|`druid.storage.basekey`|S3 base key.|none|
+|`druid.storage.basekey`|S3 object key prefix for storage.|none|
 |`druid.storage.disableAcl`|Boolean flag for ACL.|false|
-|`druid.storage.archiveBucket`|S3 bucket name where segments get archived to when running the indexing service *archive task*|none|
+|`druid.storage.archiveBucket`|S3 bucket name for archiving when running the indexing-service *archive task*.|none|
+|`druid.storage.archiveBasekey`|S3 object key prefix for archiving.|none|
 
 #### HDFS Deep Storage
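
For illustration, these properties are typically set in a runtime.properties file. A minimal sketch, assuming hypothetical bucket and key-prefix values:

    # Hypothetical values; the property names come from the table above.
    druid.storage.bucket=my-segments-bucket
    druid.storage.basekey=druid/segments
    druid.storage.archiveBucket=my-archive-bucket
    druid.storage.archiveBasekey=druid/archived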


@@ -51,7 +51,7 @@
 <div class="waiting_loading">Loading Waiting Tasks... this may take a few minutes</div>
 <table id="waitingTable"></table>
 
-<h2>Complete Tasks</h2>
+<h2>Complete Tasks - Tasks recently completed</h2>
 
 <div class="complete_loading">Loading Complete Tasks... this may take a few minutes</div>
 <table id="completeTable"></table>


@@ -265,9 +265,11 @@ public class JavaScriptAggregatorFactory implements AggregatorFactory
       final Object[] args = new Object[size + 1];
       args[0] = current;
 
-      int i = 0;
-      while (i < size) {
-        args[i + 1] = selectorList[i++].get();
+      for (int i = 0; i < size; i++) {
+        final ObjectColumnSelector selector = selectorList[i];
+        if (selector != null) {
+          args[i + 1] = selector.get();
+        }
       }
 
       final Object res = fnAggregate.call(cx, scope, scope, args);
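
The new null check covers an aggregator that references a column absent from the segment being scanned: the ObjectColumnSelector for that column is null, so its argument slot is simply left null instead of the old loop throwing a NullPointerException. A minimal sketch (script body hypothetical) of an fnAggregate that tolerates the null argument:

    // Hypothetical fnAggregate script: coerce the null/undefined argument
    // produced by a missing column to 0 before adding it to the total.
    String fnAggregate = "function aggregate(current, a) { return current + (a || 0); }";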


@@ -27,6 +27,7 @@ import org.junit.Test;
 
 import java.nio.ByteBuffer;
 import java.util.Arrays;
+import java.util.Collections;
 import java.util.Map;
 
 public class JavaScriptAggregatorTest
@@ -141,6 +142,39 @@ public class JavaScriptAggregatorTest
     Assert.assertEquals(val, agg.get(buf, position));
   }
 
+  @Test
+  public void testAggregateMissingColumn()
+  {
+    Map<String, String> script = scriptDoubleSum;
+
+    JavaScriptAggregator agg = new JavaScriptAggregator(
+        "billy",
+        Collections.<ObjectColumnSelector>singletonList(null),
+        JavaScriptAggregatorFactory.compileScript(script.get("fnAggregate"),
+                                                  script.get("fnReset"),
+                                                  script.get("fnCombine"))
+    );
+
+    final double val = 0;
+
+    Assert.assertEquals("billy", agg.getName());
+
+    agg.reset();
+    Assert.assertEquals(val, agg.get());
+    Assert.assertEquals(val, agg.get());
+    Assert.assertEquals(val, agg.get());
+
+    agg.aggregate();
+    Assert.assertEquals(val, agg.get());
+    Assert.assertEquals(val, agg.get());
+    Assert.assertEquals(val, agg.get());
+
+    agg.aggregate();
+    Assert.assertEquals(val, agg.get());
+    Assert.assertEquals(val, agg.get());
+    Assert.assertEquals(val, agg.get());
+  }
+
   public static void main(String... args) throws Exception {
     final LoopingFloatColumnSelector selector = new LoopingFloatColumnSelector(new float[]{42.12f, 9f});
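
Passing Collections.<ObjectColumnSelector>singletonList(null) stands in for a column that is absent from the segment: the test checks that reset() and repeated aggregate() calls leave the value at 0, and asserting get() three times in a row additionally verifies that reading the aggregate has no side effects.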


@@ -21,7 +21,6 @@ package io.druid.storage.s3;
 
 import com.google.common.collect.ImmutableMap;
 import com.google.inject.Inject;
-import com.metamx.common.MapUtils;
 import io.druid.segment.loading.DataSegmentArchiver;
 import io.druid.segment.loading.SegmentLoadingException;
 import io.druid.timeline.DataSegment;
@@ -46,7 +45,7 @@ public class S3DataSegmentArchiver extends S3DataSegmentMover implements DataSegmentArchiver
   public DataSegment archive(DataSegment segment) throws SegmentLoadingException
   {
     String targetS3Bucket = config.getArchiveBucket();
-    String targetS3BaseKey = config.getArchiveBaseKey();
+    String targetS3BaseKey = config.getArchiveBasekey();
 
     return move(
         segment,


@@ -27,15 +27,15 @@ public class S3DataSegmentArchiverConfig
   public String archiveBucket = "";
 
   @JsonProperty
-  public String archiveBaseKey = "";
+  public String archiveBasekey = "";
 
   public String getArchiveBucket()
   {
     return archiveBucket;
   }
 
-  public String getArchiveBaseKey()
+  public String getArchiveBasekey()
   {
-    return archiveBaseKey;
+    return archiveBasekey;
   }
 }
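
Since these @JsonProperty field names are what Druid binds runtime.properties values onto (presumably via its usual JsonConfigProvider mechanism), the rename from archiveBaseKey to archiveBasekey keeps the code consistent with the documented druid.storage.archiveBasekey setting, and the getter rename in S3DataSegmentArchiver above follows suit.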