From ba6c289ccbd98583d1849fb2b4e1cfda25370ea7 Mon Sep 17 00:00:00 2001 From: Igal Levy Date: Mon, 31 Mar 2014 13:05:54 -0700 Subject: [PATCH 01/22] minor typos fixed --- docs/content/Tasks.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/content/Tasks.md b/docs/content/Tasks.md index 799b934bca5..061f02d872a 100644 --- a/docs/content/Tasks.md +++ b/docs/content/Tasks.md @@ -51,12 +51,12 @@ The Index Task is a simpler variation of the Index Hadoop task that is designed |--------|-----------|---------| |type|The task type, this should always be "index".|yes| |id|The task ID. If this is not explicitly specified, Druid generates the task ID using the name of the task file and date-time stamp. |no| -|granularitySpec|Specifies the segment chunks that the task will process. `type` is always "uniform"; `gran` sets the granularity of the chunks ("DAY" means all segments containing timestamps in the same day, while `intervals` sets the interval that the chunks will cover.|yes| +|granularitySpec|Specifies the segment chunks that the task will process. `type` is always "uniform"; `gran` sets the granularity of the chunks ("DAY" means all segments containing timestamps in the same day), while `intervals` sets the interval that the chunks will cover.|yes| |spatialDimensions|Dimensions to build spatial indexes over. See [Geographic Queries](GeographicQueries.html).|no| -|aggregators|The metrics to aggregate in the data set. For more info, see [Aggregations](Aggregations.html)|yes| +|aggregators|The metrics to aggregate in the data set. For more info, see [Aggregations](Aggregations.html).|yes| |indexGranularity|The rollup granularity for timestamps. See [Realtime Ingestion](Realtime-ingestion.html) for more information. |no| |targetPartitionSize|Used in sharding. Determines how many rows are in each segment.|no| -|firehose|The input source of data. For more info, see [Firehose](Firehose.html)|yes| +|firehose|The input source of data. 
For more info, see [Firehose](Firehose.html).|yes| |rowFlushBoundary|Used in determining when intermediate persist should occur to disk.|no| ### Index Hadoop Task From e0e43deaec26384766a61874db4aaa6bda4ae415 Mon Sep 17 00:00:00 2001 From: Hagen Rother Date: Fri, 4 Apr 2014 13:04:40 +0200 Subject: [PATCH 02/22] fix a NPE leading to broker 500 response --- .../main/java/io/druid/query/groupby/GroupByQueryEngine.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/processing/src/main/java/io/druid/query/groupby/GroupByQueryEngine.java b/processing/src/main/java/io/druid/query/groupby/GroupByQueryEngine.java index ea58501635b..b8f63a5c4a5 100644 --- a/processing/src/main/java/io/druid/query/groupby/GroupByQueryEngine.java +++ b/processing/src/main/java/io/druid/query/groupby/GroupByQueryEngine.java @@ -182,7 +182,7 @@ public class GroupByQueryEngine final DimensionSelector dimSelector = dims.get(0); final IndexedInts row = dimSelector.getRow(); - if (row.size() == 0) { + if (row == null || row.size() == 0) { ByteBuffer newKey = key.duplicate(); newKey.putInt(dimSelector.getValueCardinality()); unaggregatedBuffers = updateValues(newKey, dims.subList(1, dims.size())); From 4576c5077e9d8a20690bb586d384a32c71d5e8ab Mon Sep 17 00:00:00 2001 From: Igal Levy Date: Fri, 4 Apr 2014 10:05:33 -0700 Subject: [PATCH 03/22] switched operations and ingestion sections --- docs/content/toc.textile | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/docs/content/toc.textile b/docs/content/toc.textile index 136aa730335..c17a7539c06 100644 --- a/docs/content/toc.textile +++ b/docs/content/toc.textile @@ -22,12 +22,6 @@ h2. Configuration * "Broker":Broker-Config.html * "Indexing Service":Indexing-Service-Config.html -h2. Operations -* "Extending Druid":./Modules.html -* "Cluster Setup":./Cluster-setup.html -* "Booting a Production Cluster":./Booting-a-production-cluster.html -* "Performance FAQ":./Performance-FAQ.html - h2. Data Ingestion * "Realtime":./Realtime-ingestion.html * "Batch":./Batch-ingestion.html @@ -36,6 +30,12 @@ h2. Data Ingestion * "Data Formats":./Data_formats.html * "Ingestion FAQ":./Ingestion-FAQ.html +h2. Operations +* "Extending Druid":./Modules.html +* "Cluster Setup":./Cluster-setup.html +* "Booting a Production Cluster":./Booting-a-production-cluster.html +* "Performance FAQ":./Performance-FAQ.html + h2. 
Querying * "Querying":./Querying.html ** "Filters":./Filters.html From b21820c68adc6a382114a6fee812f906d73563c8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Xavier=20L=C3=A9aut=C3=A9?= Date: Fri, 4 Apr 2014 14:20:54 -0700 Subject: [PATCH 04/22] make index file acl consistent with descriptor --- .../src/main/java/io/druid/storage/s3/S3DataSegmentPusher.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentPusher.java b/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentPusher.java index 664c270799b..5dad247544b 100644 --- a/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentPusher.java +++ b/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentPusher.java @@ -89,7 +89,7 @@ public class S3DataSegmentPusher implements DataSegmentPusher toPush.setBucketName(outputBucket); toPush.setKey(s3Path); if (!config.getDisableAcl()) { - toPush.setAcl(AccessControlList.REST_CANNED_AUTHENTICATED_READ); + toPush.setAcl(GSAccessControlList.REST_CANNED_BUCKET_OWNER_FULL_CONTROL); } log.info("Pushing %s.", toPush); From cfe2466e4914bb3377c907e735777afb60dffea0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Xavier=20L=C3=A9aut=C3=A9?= Date: Fri, 4 Apr 2014 14:28:54 -0700 Subject: [PATCH 05/22] enforce acl on object move --- .../java/io/druid/storage/s3/S3DataSegmentMover.java | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentMover.java b/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentMover.java index fbf18df4c18..d259ab185d2 100644 --- a/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentMover.java +++ b/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentMover.java @@ -30,6 +30,7 @@ import io.druid.segment.loading.DataSegmentMover; import io.druid.segment.loading.SegmentLoadingException; import io.druid.timeline.DataSegment; import org.jets3t.service.ServiceException; +import org.jets3t.service.acl.gs.GSAccessControlList; import org.jets3t.service.impl.rest.httpclient.RestS3Service; import org.jets3t.service.model.S3Object; @@ -41,13 +42,16 @@ public class S3DataSegmentMover implements DataSegmentMover private static final Logger log = new Logger(S3DataSegmentMover.class); private final RestS3Service s3Client; + private final S3DataSegmentPusherConfig config; @Inject public S3DataSegmentMover( - RestS3Service s3Client + RestS3Service s3Client, + S3DataSegmentPusherConfig config ) { this.s3Client = s3Client; + this.config = config; } @Override @@ -124,7 +128,11 @@ public class S3DataSegmentMover implements DataSegmentMover targetS3Bucket, targetS3Path ); - s3Client.moveObject(s3Bucket, s3Path, targetS3Bucket, new S3Object(targetS3Path), false); + final S3Object target = new S3Object(targetS3Path); + if(!config.getDisableAcl()) { + target.setAcl(GSAccessControlList.REST_CANNED_BUCKET_OWNER_FULL_CONTROL); + } + s3Client.moveObject(s3Bucket, s3Path, targetS3Bucket, target, false); } } else { // ensure object exists in target location From c46a39b13e1bfccdc09301b26f9cc4804d417c3b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Xavier=20L=C3=A9aut=C3=A9?= Date: Fri, 4 Apr 2014 14:52:22 -0700 Subject: [PATCH 06/22] fix missing arg --- .../java/io/druid/storage/s3/S3DataSegmentArchiver.java | 2 +- .../java/io/druid/storage/s3/S3DataSegmentMoverTest.java | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentArchiver.java 
b/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentArchiver.java index 69b1843c125..e8e98987bf1 100644 --- a/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentArchiver.java +++ b/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentArchiver.java @@ -39,7 +39,7 @@ public class S3DataSegmentArchiver extends S3DataSegmentMover implements DataSeg S3DataSegmentPusherConfig restoreConfig ) { - super(s3Client); + super(s3Client, restoreConfig); this.archiveConfig = archiveConfig; this.restoreConfig = restoreConfig; } diff --git a/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentMoverTest.java b/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentMoverTest.java index 6206da881a4..c13d22de5f1 100644 --- a/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentMoverTest.java +++ b/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentMoverTest.java @@ -62,7 +62,7 @@ public class S3DataSegmentMoverTest public void testMove() throws Exception { MockStorageService mockS3Client = new MockStorageService(); - S3DataSegmentMover mover = new S3DataSegmentMover(mockS3Client); + S3DataSegmentMover mover = new S3DataSegmentMover(mockS3Client, new S3DataSegmentPusherConfig()); mockS3Client.putObject("main", new S3Object("baseKey/test/2013-01-01T00:00:00.000Z_2013-01-02T00:00:00.000Z/1/0/index.zip")); mockS3Client.putObject("main", new S3Object("baseKey/test/2013-01-01T00:00:00.000Z_2013-01-02T00:00:00.000Z/1/0/descriptor.json")); @@ -82,7 +82,7 @@ public class S3DataSegmentMoverTest public void testMoveNoop() throws Exception { MockStorageService mockS3Client = new MockStorageService(); - S3DataSegmentMover mover = new S3DataSegmentMover(mockS3Client); + S3DataSegmentMover mover = new S3DataSegmentMover(mockS3Client, new S3DataSegmentPusherConfig()); mockS3Client.putObject("archive", new S3Object("targetBaseKey/test/2013-01-01T00:00:00.000Z_2013-01-02T00:00:00.000Z/1/0/index.zip")); mockS3Client.putObject("archive", new S3Object("targetBaseKey/test/2013-01-01T00:00:00.000Z_2013-01-02T00:00:00.000Z/1/0/descriptor.json")); @@ -103,7 +103,7 @@ public class S3DataSegmentMoverTest public void testMoveException() throws Exception { MockStorageService mockS3Client = new MockStorageService(); - S3DataSegmentMover mover = new S3DataSegmentMover(mockS3Client); + S3DataSegmentMover mover = new S3DataSegmentMover(mockS3Client, new S3DataSegmentPusherConfig()); mover.move( sourceSegment, From 1267fbb7f5b2c5adb290ad7c1b354011d249c4b2 Mon Sep 17 00:00:00 2001 From: fjy Date: Sun, 6 Apr 2014 09:20:58 -0700 Subject: [PATCH 07/22] fix context to be backwards compat --- .../main/java/io/druid/query/BaseQuery.java | 65 +++++++++ .../io/druid/query/BySegmentQueryRunner.java | 2 +- .../query/BySegmentSkippingQueryRunner.java | 2 +- .../query/ChainedExecutionQueryRunner.java | 2 +- .../query/FinalizeResultsQueryRunner.java | 9 +- .../query/GroupByParallelQueryRunner.java | 2 +- .../src/main/java/io/druid/query/Query.java | 7 + .../search/SearchQueryQueryToolChest.java | 2 +- .../query/topn/TopNQueryQueryToolChest.java | 2 +- .../client/CachePopulatingQueryRunner.java | 2 +- .../druid/client/CachingClusteredClient.java | 15 +- .../io/druid/client/DirectDruidClient.java | 2 +- .../server/AsyncQueryForwardingServlet.java | 5 +- .../client/CachingClusteredClientTest.java | 138 ++++++++++-------- 14 files changed, 169 insertions(+), 86 deletions(-) diff --git a/processing/src/main/java/io/druid/query/BaseQuery.java b/processing/src/main/java/io/druid/query/BaseQuery.java 
index 71beaa26652..09316db058a 100644 --- a/processing/src/main/java/io/druid/query/BaseQuery.java +++ b/processing/src/main/java/io/druid/query/BaseQuery.java @@ -23,6 +23,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Maps; +import com.metamx.common.ISE; import com.metamx.common.guava.Sequence; import io.druid.query.spec.QuerySegmentSpec; import org.joda.time.Duration; @@ -120,6 +121,70 @@ public abstract class BaseQuery implements Query return retVal == null ? defaultValue : retVal; } + @Override + public int getContextPriority(int defaultValue) + { + Object val = context.get("priority"); + if (val == null) { + return defaultValue; + } + if (val instanceof String) { + return Integer.parseInt((String) val); + } else if (val instanceof Integer) { + return (int) val; + } else { + throw new ISE("Unknown type [%s]", val.getClass()); + } + } + + @Override + public boolean getContextBySegment(boolean defaultValue) + { + Object val = context.get("bySegment"); + if (val == null) { + return defaultValue; + } + if (val instanceof String) { + return Boolean.parseBoolean((String) val); + } else if (val instanceof Integer) { + return (boolean) val; + } else { + throw new ISE("Unknown type [%s]", val.getClass()); + } + } + + @Override + public boolean getContextPopulateCache(boolean defaultValue) + { + Object val = context.get("populateCache"); + if (val == null) { + return defaultValue; + } + if (val instanceof String) { + return Boolean.parseBoolean((String) val); + } else if (val instanceof Integer) { + return (boolean) val; + } else { + throw new ISE("Unknown type [%s]", val.getClass()); + } + } + + @Override + public boolean getContextUseCache(boolean defaultValue) + { + Object val = context.get("useCache"); + if (val == null) { + return defaultValue; + } + if (val instanceof String) { + return Boolean.parseBoolean((String) val); + } else if (val instanceof Integer) { + return (boolean) val; + } else { + throw new ISE("Unknown type [%s]", val.getClass()); + } + } + protected Map computeOverridenContext(Map overrides) { Map overridden = Maps.newTreeMap(); diff --git a/processing/src/main/java/io/druid/query/BySegmentQueryRunner.java b/processing/src/main/java/io/druid/query/BySegmentQueryRunner.java index d6150f63456..44094d0216a 100644 --- a/processing/src/main/java/io/druid/query/BySegmentQueryRunner.java +++ b/processing/src/main/java/io/druid/query/BySegmentQueryRunner.java @@ -53,7 +53,7 @@ public class BySegmentQueryRunner implements QueryRunner @SuppressWarnings("unchecked") public Sequence run(final Query query) { - if (Boolean.parseBoolean(query.getContextValue("bySegment"))) { + if (query.getContextBySegment(false)) { final Sequence baseSequence = base.run(query); return new Sequence() { diff --git a/processing/src/main/java/io/druid/query/BySegmentSkippingQueryRunner.java b/processing/src/main/java/io/druid/query/BySegmentSkippingQueryRunner.java index 8e666c30b16..13ca4dd75df 100644 --- a/processing/src/main/java/io/druid/query/BySegmentSkippingQueryRunner.java +++ b/processing/src/main/java/io/druid/query/BySegmentSkippingQueryRunner.java @@ -37,7 +37,7 @@ public abstract class BySegmentSkippingQueryRunner implements QueryRunner @Override public Sequence run(Query query) { - if (Boolean.parseBoolean(query.getContextValue("bySegment"))) { + if (query.getContextBySegment(false)) { return baseRunner.run(query); } diff --git 
a/processing/src/main/java/io/druid/query/ChainedExecutionQueryRunner.java b/processing/src/main/java/io/druid/query/ChainedExecutionQueryRunner.java index d3600068a23..3e3e6b03243 100644 --- a/processing/src/main/java/io/druid/query/ChainedExecutionQueryRunner.java +++ b/processing/src/main/java/io/druid/query/ChainedExecutionQueryRunner.java @@ -83,7 +83,7 @@ public class ChainedExecutionQueryRunner implements QueryRunner @Override public Sequence run(final Query query) { - final int priority = Integer.parseInt((String) query.getContextValue("priority", "0")); + final int priority = query.getContextValue("priority", 0); return new BaseSequence>( new BaseSequence.IteratorMaker>() diff --git a/processing/src/main/java/io/druid/query/FinalizeResultsQueryRunner.java b/processing/src/main/java/io/druid/query/FinalizeResultsQueryRunner.java index 2880332e184..dee05888470 100644 --- a/processing/src/main/java/io/druid/query/FinalizeResultsQueryRunner.java +++ b/processing/src/main/java/io/druid/query/FinalizeResultsQueryRunner.java @@ -48,8 +48,8 @@ public class FinalizeResultsQueryRunner implements QueryRunner @Override public Sequence run(final Query query) { - final boolean isBySegment = Boolean.parseBoolean(query.getContextValue("bySegment")); - final boolean shouldFinalize = Boolean.parseBoolean(query.getContextValue("finalize", "true")); + final boolean isBySegment = query.getContextBySegment(false); + final boolean shouldFinalize = query.getContextFinalize(true); if (shouldFinalize) { Function finalizerFn; if (isBySegment) { @@ -84,8 +84,7 @@ public class FinalizeResultsQueryRunner implements QueryRunner ); } }; - } - else { + } else { finalizerFn = toolChest.makeMetricManipulatorFn( query, new MetricManipulationFn() @@ -100,7 +99,7 @@ public class FinalizeResultsQueryRunner implements QueryRunner } return Sequences.map( - baseRunner.run(query.withOverriddenContext(ImmutableMap.of("finalize", "false"))), + baseRunner.run(query.withOverriddenContext(ImmutableMap.of("finalize", false))), finalizerFn ); } diff --git a/processing/src/main/java/io/druid/query/GroupByParallelQueryRunner.java b/processing/src/main/java/io/druid/query/GroupByParallelQueryRunner.java index 10dde9b26ea..20817a772e5 100644 --- a/processing/src/main/java/io/druid/query/GroupByParallelQueryRunner.java +++ b/processing/src/main/java/io/druid/query/GroupByParallelQueryRunner.java @@ -83,7 +83,7 @@ public class GroupByParallelQueryRunner implements QueryRunner query, configSupplier.get() ); - final int priority = Integer.parseInt((String) query.getContextValue("priority", "0")); + final int priority = query.getContextPriority(0); if (Iterables.isEmpty(queryables)) { log.warn("No queryables found."); diff --git a/processing/src/main/java/io/druid/query/Query.java b/processing/src/main/java/io/druid/query/Query.java index 10a84328584..2de75e57455 100644 --- a/processing/src/main/java/io/druid/query/Query.java +++ b/processing/src/main/java/io/druid/query/Query.java @@ -74,6 +74,13 @@ public interface Query public ContextType getContextValue(String key, ContextType defaultValue); + // For backwards compatibility + public int getContextPriority(int defaultValue); + public boolean getContextBySegment(boolean defaultValue); + public boolean getContextPopulateCache(boolean defaultValue); + public boolean getContextUseCache(boolean defaultValue); + public boolean getContextFinalize(boolean defaultValue); + public Query withOverriddenContext(Map contextOverride); public Query withQuerySegmentSpec(QuerySegmentSpec spec); diff 
--git a/processing/src/main/java/io/druid/query/search/SearchQueryQueryToolChest.java b/processing/src/main/java/io/druid/query/search/SearchQueryQueryToolChest.java index 6e14ef1c1f3..f559829d593 100644 --- a/processing/src/main/java/io/druid/query/search/SearchQueryQueryToolChest.java +++ b/processing/src/main/java/io/druid/query/search/SearchQueryQueryToolChest.java @@ -294,7 +294,7 @@ public class SearchQueryQueryToolChest extends QueryToolChest implements QueryRunner final CacheStrategy strategy = toolChest.getCacheStrategy(query); - final boolean populateCache = Boolean.parseBoolean(query.getContextValue(CacheConfig.POPULATE_CACHE, "true")) + final boolean populateCache = query.getContextPopulateCache(true) && strategy != null && cacheConfig.isPopulateCache() // historical only populates distributed cache since the cache lookups are done at broker. diff --git a/server/src/main/java/io/druid/client/CachingClusteredClient.java b/server/src/main/java/io/druid/client/CachingClusteredClient.java index 65ac6bea415..0e63f9e4ac3 100644 --- a/server/src/main/java/io/druid/client/CachingClusteredClient.java +++ b/server/src/main/java/io/druid/client/CachingClusteredClient.java @@ -62,7 +62,6 @@ import io.druid.timeline.partition.PartitionChunk; import org.joda.time.DateTime; import org.joda.time.Interval; -import javax.annotation.Nullable; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; @@ -125,24 +124,24 @@ public class CachingClusteredClient implements QueryRunner final List> cachedResults = Lists.newArrayList(); final Map cachePopulatorMap = Maps.newHashMap(); - final boolean useCache = Boolean.parseBoolean(query.getContextValue(CacheConfig.USE_CACHE, "true")) + final boolean useCache = query.getContextUseCache(true) && strategy != null && cacheConfig.isUseCache(); - final boolean populateCache = Boolean.parseBoolean(query.getContextValue(CacheConfig.POPULATE_CACHE, "true")) + final boolean populateCache = query.getContextPopulateCache(true) && strategy != null && cacheConfig.isPopulateCache(); - final boolean isBySegment = Boolean.parseBoolean(query.getContextValue("bySegment", "false")); + final boolean isBySegment = query.getContextBySegment(false); ImmutableMap.Builder contextBuilder = new ImmutableMap.Builder<>(); - final String priority = query.getContextValue("priority", "0"); + final int priority = query.getContextPriority(0); contextBuilder.put("priority", priority); if (populateCache) { - contextBuilder.put(CacheConfig.POPULATE_CACHE, "false"); - contextBuilder.put("bySegment", "true"); + contextBuilder.put(CacheConfig.POPULATE_CACHE, false); + contextBuilder.put("bySegment", true); } - contextBuilder.put("intermediate", "true"); + contextBuilder.put("intermediate", true); final Query rewrittenQuery = query.withOverriddenContext(contextBuilder.build()); diff --git a/server/src/main/java/io/druid/client/DirectDruidClient.java b/server/src/main/java/io/druid/client/DirectDruidClient.java index fa95ba97f11..76c842029b8 100644 --- a/server/src/main/java/io/druid/client/DirectDruidClient.java +++ b/server/src/main/java/io/druid/client/DirectDruidClient.java @@ -106,7 +106,7 @@ public class DirectDruidClient implements QueryRunner public Sequence run(Query query) { QueryToolChest> toolChest = warehouse.getToolChest(query); - boolean isBySegment = Boolean.parseBoolean(query.getContextValue("bySegment", "false")); + boolean isBySegment = query.getContextBySegment(false); Pair types = typesMap.get(query.getClass()); if (types == null) { diff --git 
a/server/src/main/java/io/druid/server/AsyncQueryForwardingServlet.java b/server/src/main/java/io/druid/server/AsyncQueryForwardingServlet.java index 85f33a70007..26f2c8f6ff2 100644 --- a/server/src/main/java/io/druid/server/AsyncQueryForwardingServlet.java +++ b/server/src/main/java/io/druid/server/AsyncQueryForwardingServlet.java @@ -106,8 +106,6 @@ public class AsyncQueryForwardingServlet extends HttpServlet } req.setAttribute(DISPATCHED, true); - resp.setStatus(200); - resp.setContentType("application/x-javascript"); query = objectMapper.readValue(req.getInputStream(), Query.class); queryId = query.getId(); @@ -132,6 +130,9 @@ public class AsyncQueryForwardingServlet extends HttpServlet @Override public ClientResponse handleResponse(HttpResponse response) { + resp.setStatus(response.getStatus().getCode()); + resp.setContentType("application/x-javascript"); + byte[] bytes = getContentBytes(response.getContent()); if (bytes.length > 0) { try { diff --git a/server/src/test/java/io/druid/client/CachingClusteredClientTest.java b/server/src/test/java/io/druid/client/CachingClusteredClientTest.java index cfea29f9a8e..bb8787e66db 100644 --- a/server/src/test/java/io/druid/client/CachingClusteredClientTest.java +++ b/server/src/test/java/io/druid/client/CachingClusteredClientTest.java @@ -214,13 +214,13 @@ public class CachingClusteredClientTest public void testTimeseriesCaching() throws Exception { final Druids.TimeseriesQueryBuilder builder = Druids.newTimeseriesQueryBuilder() - .dataSource(DATA_SOURCE) - .intervals(SEG_SPEC) - .filters(DIM_FILTER) - .granularity(GRANULARITY) - .aggregators(AGGS) - .postAggregators(POST_AGGS) - .context(CONTEXT); + .dataSource(DATA_SOURCE) + .intervals(SEG_SPEC) + .filters(DIM_FILTER) + .granularity(GRANULARITY) + .aggregators(AGGS) + .postAggregators(POST_AGGS) + .context(CONTEXT); testQueryCaching( builder.build(), @@ -265,9 +265,9 @@ public class CachingClusteredClientTest ), client.run( builder.intervals("2011-01-01/2011-01-10") - .aggregators(RENAMED_AGGS) - .postAggregators(RENAMED_POST_AGGS) - .build() + .aggregators(RENAMED_AGGS) + .postAggregators(RENAMED_POST_AGGS) + .build() ) ); } @@ -277,13 +277,13 @@ public class CachingClusteredClientTest public void testTimeseriesCachingTimeZone() throws Exception { final Druids.TimeseriesQueryBuilder builder = Druids.newTimeseriesQueryBuilder() - .dataSource(DATA_SOURCE) - .intervals(SEG_SPEC) - .filters(DIM_FILTER) - .granularity(PT1H_TZ_GRANULARITY) - .aggregators(AGGS) - .postAggregators(POST_AGGS) - .context(CONTEXT); + .dataSource(DATA_SOURCE) + .intervals(SEG_SPEC) + .filters(DIM_FILTER) + .granularity(PT1H_TZ_GRANULARITY) + .aggregators(AGGS) + .postAggregators(POST_AGGS) + .context(CONTEXT); testQueryCaching( builder.build(), @@ -305,9 +305,9 @@ public class CachingClusteredClientTest ), client.run( builder.intervals("2011-11-04/2011-11-08") - .aggregators(RENAMED_AGGS) - .postAggregators(RENAMED_POST_AGGS) - .build() + .aggregators(RENAMED_AGGS) + .postAggregators(RENAMED_POST_AGGS) + .build() ) ); } @@ -316,18 +316,22 @@ public class CachingClusteredClientTest public void testDisableUseCache() throws Exception { final Druids.TimeseriesQueryBuilder builder = Druids.newTimeseriesQueryBuilder() - .dataSource(DATA_SOURCE) - .intervals(SEG_SPEC) - .filters(DIM_FILTER) - .granularity(GRANULARITY) - .aggregators(AGGS) - .postAggregators(POST_AGGS); + .dataSource(DATA_SOURCE) + .intervals(SEG_SPEC) + .filters(DIM_FILTER) + .granularity(GRANULARITY) + .aggregators(AGGS) + .postAggregators(POST_AGGS); 
testQueryCaching( 1, true, - builder.context(ImmutableMap.of("useCache", "false", - "populateCache", "true")).build(), + builder.context( + ImmutableMap.of( + "useCache", "false", + "populateCache", "true" + ) + ).build(), new Interval("2011-01-01/2011-01-02"), makeTimeResults(new DateTime("2011-01-01"), 50, 5000) ); @@ -340,8 +344,12 @@ public class CachingClusteredClientTest testQueryCaching( 1, false, - builder.context(ImmutableMap.of("useCache", "false", - "populateCache", "false")).build(), + builder.context( + ImmutableMap.of( + "useCache", "false", + "populateCache", "false" + ) + ).build(), new Interval("2011-01-01/2011-01-02"), makeTimeResults(new DateTime("2011-01-01"), 50, 5000) ); @@ -352,8 +360,12 @@ public class CachingClusteredClientTest testQueryCaching( 1, false, - builder.context(ImmutableMap.of("useCache", "true", - "populateCache", "false")).build(), + builder.context( + ImmutableMap.of( + "useCache", "true", + "populateCache", "false" + ) + ).build(), new Interval("2011-01-01/2011-01-02"), makeTimeResults(new DateTime("2011-01-01"), 50, 5000) ); @@ -422,10 +434,10 @@ public class CachingClusteredClientTest ), client.run( builder.intervals("2011-01-01/2011-01-10") - .metric("imps") - .aggregators(RENAMED_AGGS) - .postAggregators(RENAMED_POST_AGGS) - .build() + .metric("imps") + .aggregators(RENAMED_AGGS) + .postAggregators(RENAMED_POST_AGGS) + .build() ) ); } @@ -467,10 +479,10 @@ public class CachingClusteredClientTest ), client.run( builder.intervals("2011-11-04/2011-11-08") - .metric("imps") - .aggregators(RENAMED_AGGS) - .postAggregators(RENAMED_POST_AGGS) - .build() + .metric("imps") + .aggregators(RENAMED_AGGS) + .postAggregators(RENAMED_POST_AGGS) + .build() ) ); } @@ -533,10 +545,10 @@ public class CachingClusteredClientTest ), client.run( builder.intervals("2011-01-01/2011-01-10") - .metric("imps") - .aggregators(RENAMED_AGGS) - .postAggregators(RENAMED_POST_AGGS) - .build() + .metric("imps") + .aggregators(RENAMED_AGGS) + .postAggregators(RENAMED_POST_AGGS) + .build() ) ); } @@ -638,8 +650,8 @@ public class CachingClusteredClientTest EasyMock.expect(serverView.getQueryRunner(server)) - .andReturn(expectations.getQueryRunner()) - .once(); + .andReturn(expectations.getQueryRunner()) + .once(); final Capture capture = new Capture(); queryCaptures.add(capture); @@ -656,8 +668,8 @@ public class CachingClusteredClientTest } EasyMock.expect(queryable.run(EasyMock.capture(capture))) - .andReturn(toQueryableTimeseriesResults(expectBySegment, segmentIds, intervals, results)) - .once(); + .andReturn(toQueryableTimeseriesResults(expectBySegment, segmentIds, intervals, results)) + .once(); } else if (query instanceof TopNQuery) { List segmentIds = Lists.newArrayList(); @@ -669,8 +681,8 @@ public class CachingClusteredClientTest results.add(expectation.getResults()); } EasyMock.expect(queryable.run(EasyMock.capture(capture))) - .andReturn(toQueryableTopNResults(segmentIds, intervals, results)) - .once(); + .andReturn(toQueryableTopNResults(segmentIds, intervals, results)) + .once(); } else if (query instanceof SearchQuery) { List segmentIds = Lists.newArrayList(); List intervals = Lists.newArrayList(); @@ -681,8 +693,8 @@ public class CachingClusteredClientTest results.add(expectation.getResults()); } EasyMock.expect(queryable.run(EasyMock.capture(capture))) - .andReturn(toQueryableSearchResults(segmentIds, intervals, results)) - .once(); + .andReturn(toQueryableSearchResults(segmentIds, intervals, results)) + .once(); } else if (query instanceof TimeBoundaryQuery) { List 
segmentIds = Lists.newArrayList(); List intervals = Lists.newArrayList(); @@ -693,8 +705,8 @@ public class CachingClusteredClientTest results.add(expectation.getResults()); } EasyMock.expect(queryable.run(EasyMock.capture(capture))) - .andReturn(toQueryableTimeBoundaryResults(segmentIds, intervals, results)) - .once(); + .andReturn(toQueryableTimeBoundaryResults(segmentIds, intervals, results)) + .once(); } else { throw new ISE("Unknown query type[%s]", query.getClass()); } @@ -762,11 +774,11 @@ public class CachingClusteredClientTest for (Capture queryCapture : queryCaptures) { Query capturedQuery = (Query) queryCapture.getValue(); if (expectBySegment) { - Assert.assertEquals("true", capturedQuery.getContextValue("bySegment")); + Assert.assertEquals(true, capturedQuery.getContextValue("bySegment")); } else { Assert.assertTrue( capturedQuery.getContextValue("bySegment") == null || - capturedQuery.getContextValue("bySegment").equals("false") + capturedQuery.getContextValue("bySegment").equals(false) ); } } @@ -1160,13 +1172,13 @@ public class CachingClusteredClientTest return new CachingClusteredClient( new MapQueryToolChestWarehouse( ImmutableMap., QueryToolChest>builder() - .put( - TimeseriesQuery.class, - new TimeseriesQueryQueryToolChest(new QueryConfig()) - ) - .put(TopNQuery.class, new TopNQueryQueryToolChest(new TopNQueryConfig())) - .put(SearchQuery.class, new SearchQueryQueryToolChest(new SearchQueryConfig())) - .build() + .put( + TimeseriesQuery.class, + new TimeseriesQueryQueryToolChest(new QueryConfig()) + ) + .put(TopNQuery.class, new TopNQueryQueryToolChest(new TopNQueryConfig())) + .put(SearchQuery.class, new SearchQueryQueryToolChest(new SearchQueryConfig())) + .build() ), new TimelineServerView() { From 98c3faa767f572d058ec05a51dc3339d4a3d556f Mon Sep 17 00:00:00 2001 From: fjy Date: Sun, 6 Apr 2014 09:23:59 -0700 Subject: [PATCH 08/22] cleanup --- .../main/java/io/druid/query/BaseQuery.java | 41 ++++++++----------- 1 file changed, 16 insertions(+), 25 deletions(-) diff --git a/processing/src/main/java/io/druid/query/BaseQuery.java b/processing/src/main/java/io/druid/query/BaseQuery.java index 09316db058a..fb38378df76 100644 --- a/processing/src/main/java/io/druid/query/BaseQuery.java +++ b/processing/src/main/java/io/druid/query/BaseQuery.java @@ -140,48 +140,39 @@ public abstract class BaseQuery implements Query @Override public boolean getContextBySegment(boolean defaultValue) { - Object val = context.get("bySegment"); - if (val == null) { - return defaultValue; - } - if (val instanceof String) { - return Boolean.parseBoolean((String) val); - } else if (val instanceof Integer) { - return (boolean) val; - } else { - throw new ISE("Unknown type [%s]", val.getClass()); - } + return parseBoolean("bySegment", defaultValue); } @Override public boolean getContextPopulateCache(boolean defaultValue) { - Object val = context.get("populateCache"); - if (val == null) { - return defaultValue; - } - if (val instanceof String) { - return Boolean.parseBoolean((String) val); - } else if (val instanceof Integer) { - return (boolean) val; - } else { - throw new ISE("Unknown type [%s]", val.getClass()); - } + return parseBoolean("populateCache", defaultValue); } @Override public boolean getContextUseCache(boolean defaultValue) { - Object val = context.get("useCache"); + return parseBoolean("useCache", defaultValue); + } + + @Override + public boolean getContextFinalize(boolean defaultValue) + { + return parseBoolean("finalize", defaultValue); + } + + private boolean parseBoolean(String key, 
boolean defaultValue) + { + Object val = context.get(key); if (val == null) { return defaultValue; } if (val instanceof String) { return Boolean.parseBoolean((String) val); - } else if (val instanceof Integer) { + } else if (val instanceof Boolean) { return (boolean) val; } else { - throw new ISE("Unknown type [%s]", val.getClass()); + throw new ISE("Unknown type [%s]. Cannot parse!", val.getClass()); } } From 4d6174ebf296c0848fc46d90eee0ee2d533b011b Mon Sep 17 00:00:00 2001 From: fjy Date: Sun, 6 Apr 2014 09:47:37 -0700 Subject: [PATCH 09/22] be able to set maxPendingPersists in plumber --- .../realtime/plumber/RealtimePlumberSchool.java | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/server/src/main/java/io/druid/segment/realtime/plumber/RealtimePlumberSchool.java b/server/src/main/java/io/druid/segment/realtime/plumber/RealtimePlumberSchool.java index 4a8332137d4..447caa4a7e6 100644 --- a/server/src/main/java/io/druid/segment/realtime/plumber/RealtimePlumberSchool.java +++ b/server/src/main/java/io/druid/segment/realtime/plumber/RealtimePlumberSchool.java @@ -117,6 +117,12 @@ public class RealtimePlumberSchool implements PlumberSchool this.rejectionPolicyFactory = factory; } + @JsonProperty("maxPendingPersists") + public void setDefaultMaxPendingPersists(int maxPendingPersists) + { + this.maxPendingPersists = maxPendingPersists; + } + public void setEmitter(ServiceEmitter emitter) { this.emitter = emitter; @@ -152,11 +158,6 @@ public class RealtimePlumberSchool implements PlumberSchool this.queryExecutorService = executorService; } - public void setDefaultMaxPendingPersists(int maxPendingPersists) - { - this.maxPendingPersists = maxPendingPersists; - } - @Override public Plumber findPlumber(final Schema schema, final FireDepartmentMetrics metrics) { From f0e9d6fd83e07d7883e67297bad762366dcbc5ee Mon Sep 17 00:00:00 2001 From: fjy Date: Sun, 6 Apr 2014 12:41:39 -0700 Subject: [PATCH 10/22] fix query forwarding --- .../src/main/java/io/druid/client/RoutingDruidClient.java | 3 +-- .../java/io/druid/server/AsyncQueryForwardingServlet.java | 7 ++++++- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/server/src/main/java/io/druid/client/RoutingDruidClient.java b/server/src/main/java/io/druid/client/RoutingDruidClient.java index 9fd3e2b0eac..7ad4ec5d820 100644 --- a/server/src/main/java/io/druid/client/RoutingDruidClient.java +++ b/server/src/main/java/io/druid/client/RoutingDruidClient.java @@ -68,13 +68,12 @@ public class RoutingDruidClient } public ListenableFuture run( - String host, + String url, Query query, HttpResponseHandler responseHandler ) { final ListenableFuture future; - final String url = String.format("http://%s/druid/v2/", host); try { log.debug("Querying url[%s]", url); diff --git a/server/src/main/java/io/druid/server/AsyncQueryForwardingServlet.java b/server/src/main/java/io/druid/server/AsyncQueryForwardingServlet.java index 26f2c8f6ff2..4302b8692f8 100644 --- a/server/src/main/java/io/druid/server/AsyncQueryForwardingServlet.java +++ b/server/src/main/java/io/druid/server/AsyncQueryForwardingServlet.java @@ -210,7 +210,7 @@ public class AsyncQueryForwardingServlet extends HttpServlet @Override public void run() { - routingDruidClient.run(host, theQuery, responseHandler); + routingDruidClient.run(makeUrl(host, req), theQuery, responseHandler); } } ); @@ -236,4 +236,9 @@ public class AsyncQueryForwardingServlet extends HttpServlet .emit(); } } + + private String makeUrl(String host, HttpServletRequest req) + { + return 
String.format("http://%s%s?%s", host, req.getRequestURI(), req.getQueryString()); + } } From 40c14c9a3c74fba1aa419dd8a45c09a0dc88e44e Mon Sep 17 00:00:00 2001 From: fjy Date: Sun, 6 Apr 2014 12:43:40 -0700 Subject: [PATCH 11/22] fix case with no request params --- .../java/io/druid/server/AsyncQueryForwardingServlet.java | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/server/src/main/java/io/druid/server/AsyncQueryForwardingServlet.java b/server/src/main/java/io/druid/server/AsyncQueryForwardingServlet.java index 4302b8692f8..5d1dc796b6b 100644 --- a/server/src/main/java/io/druid/server/AsyncQueryForwardingServlet.java +++ b/server/src/main/java/io/druid/server/AsyncQueryForwardingServlet.java @@ -239,6 +239,11 @@ public class AsyncQueryForwardingServlet extends HttpServlet private String makeUrl(String host, HttpServletRequest req) { + String queryString = req.getQueryString(); + + if (queryString == null) { + return String.format("http://%s%s", host, req.getRequestURI()); + } return String.format("http://%s%s?%s", host, req.getRequestURI(), req.getQueryString()); } } From 00369542618d7665bcbe9d9356dbd3a6c5d23dea Mon Sep 17 00:00:00 2001 From: Jon Chase Date: Mon, 7 Apr 2014 09:14:21 -0400 Subject: [PATCH 12/22] docs: correcting path to index file --- docs/content/Tutorial:-Loading-Your-Data-Part-1.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/content/Tutorial:-Loading-Your-Data-Part-1.md b/docs/content/Tutorial:-Loading-Your-Data-Part-1.md index 122ce70ccc4..cc7106b53e3 100644 --- a/docs/content/Tutorial:-Loading-Your-Data-Part-1.md +++ b/docs/content/Tutorial:-Loading-Your-Data-Part-1.md @@ -136,7 +136,7 @@ Indexing the Data To index the data and build a Druid segment, we are going to need to submit a task to the indexing service. 
This task should already exist: ``` -examples/indexing/index_task.json +examples/indexing/wikipedia_index_task.json ``` Open up the file to see the following: From dda09633be602cb778070abc789e5cbd83896ea3 Mon Sep 17 00:00:00 2001 From: fjy Date: Mon, 7 Apr 2014 10:53:43 -0700 Subject: [PATCH 13/22] make query id a bit more readable --- server/src/main/java/io/druid/server/QueryIDProvider.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/src/main/java/io/druid/server/QueryIDProvider.java b/server/src/main/java/io/druid/server/QueryIDProvider.java index 9226af1606b..8283a73eff4 100644 --- a/server/src/main/java/io/druid/server/QueryIDProvider.java +++ b/server/src/main/java/io/druid/server/QueryIDProvider.java @@ -44,7 +44,7 @@ public class QueryIDProvider return String.format( "%s_%s_%s_%s_%s", query.getDataSource(), - query.getDuration(), + query.getIntervals(), host, new DateTime(), id.incrementAndGet() From 93349b5d07d7feb35eaf2abdcd3caa9910cccfa1 Mon Sep 17 00:00:00 2001 From: fjy Date: Mon, 7 Apr 2014 10:56:38 -0700 Subject: [PATCH 14/22] fix broken unit tests --- processing/src/main/java/io/druid/query/BaseQuery.java | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/processing/src/main/java/io/druid/query/BaseQuery.java b/processing/src/main/java/io/druid/query/BaseQuery.java index fb38378df76..32d9c3256f4 100644 --- a/processing/src/main/java/io/druid/query/BaseQuery.java +++ b/processing/src/main/java/io/druid/query/BaseQuery.java @@ -124,6 +124,9 @@ public abstract class BaseQuery implements Query @Override public int getContextPriority(int defaultValue) { + if (context == null) { + return defaultValue; + } Object val = context.get("priority"); if (val == null) { return defaultValue; @@ -163,6 +166,9 @@ public abstract class BaseQuery implements Query private boolean parseBoolean(String key, boolean defaultValue) { + if (context == null) { + return defaultValue; + } Object val = context.get(key); if (val == null) { return defaultValue; From 2e9579a1468f4bafea9eefe2df58cf1494a543d2 Mon Sep 17 00:00:00 2001 From: fjy Date: Mon, 7 Apr 2014 12:43:30 -0700 Subject: [PATCH 15/22] prepare for next release --- build.sh | 2 +- docs/content/Examples.md | 4 ++-- docs/content/Indexing-Service-Config.md | 4 ++-- docs/content/Realtime-Config.md | 4 ++-- docs/content/Tutorial:-A-First-Look-at-Druid.md | 4 ++-- docs/content/Tutorial:-The-Druid-Cluster.md | 6 +++--- docs/content/Tutorial:-Webstream.md | 4 ++-- docs/content/Twitter-Tutorial.textile | 2 +- examples/config/historical/runtime.properties | 2 +- examples/config/realtime/runtime.properties | 2 +- pom.xml | 2 +- services/src/main/java/io/druid/cli/CliBroker.java | 2 +- services/src/main/java/io/druid/cli/CliCoordinator.java | 2 +- services/src/main/java/io/druid/cli/CliHadoopIndexer.java | 2 +- services/src/main/java/io/druid/cli/CliHistorical.java | 2 +- services/src/main/java/io/druid/cli/CliOverlord.java | 2 +- services/src/main/java/io/druid/cli/CliRealtime.java | 2 +- 17 files changed, 24 insertions(+), 24 deletions(-) diff --git a/build.sh b/build.sh index 25b027b5626..be57dcb3b62 100755 --- a/build.sh +++ b/build.sh @@ -30,4 +30,4 @@ echo "For examples, see: " echo " " ls -1 examples/*/*sh echo " " -echo "See also http://druid.io/docs/0.6.81" +echo "See also http://druid.io/docs/0.6.83" diff --git a/docs/content/Examples.md b/docs/content/Examples.md index beb436c78a9..8b43f353eb8 100644 --- a/docs/content/Examples.md +++ b/docs/content/Examples.md @@ -19,13 +19,13 @@ Clone Druid and build it: git 
clone https://github.com/metamx/druid.git druid cd druid git fetch --tags -git checkout druid-0.6.81 +git checkout druid-0.6.83 ./build.sh ``` ### Downloading the DSK (Druid Standalone Kit) -[Download](http://static.druid.io/artifacts/releases/druid-services-0.6.81-bin.tar.gz) a stand-alone tarball and run it: +[Download](http://static.druid.io/artifacts/releases/druid-services-0.6.83-bin.tar.gz) a stand-alone tarball and run it: ``` bash tar -xzf druid-services-0.X.X-bin.tar.gz diff --git a/docs/content/Indexing-Service-Config.md b/docs/content/Indexing-Service-Config.md index 19646a5ac47..366dd3b85f4 100644 --- a/docs/content/Indexing-Service-Config.md +++ b/docs/content/Indexing-Service-Config.md @@ -66,7 +66,7 @@ druid.host=#{IP_ADDR}:8080 druid.port=8080 druid.service=druid/prod/indexer -druid.extensions.coordinates=["io.druid.extensions:druid-s3-extensions:0.6.81"] +druid.extensions.coordinates=["io.druid.extensions:druid-s3-extensions:0.6.83"] druid.zk.service.host=#{ZK_IPs} druid.zk.paths.base=/druid/prod @@ -115,7 +115,7 @@ druid.host=#{IP_ADDR}:8080 druid.port=8080 druid.service=druid/prod/worker -druid.extensions.coordinates=["io.druid.extensions:druid-s3-extensions:0.6.81","io.druid.extensions:druid-kafka-seven:0.6.81"] +druid.extensions.coordinates=["io.druid.extensions:druid-s3-extensions:0.6.83","io.druid.extensions:druid-kafka-seven:0.6.83"] druid.zk.service.host=#{ZK_IPs} druid.zk.paths.base=/druid/prod diff --git a/docs/content/Realtime-Config.md b/docs/content/Realtime-Config.md index 410fe8e4bce..9c1ce7c9bed 100644 --- a/docs/content/Realtime-Config.md +++ b/docs/content/Realtime-Config.md @@ -27,7 +27,7 @@ druid.host=localhost druid.service=realtime druid.port=8083 -druid.extensions.coordinates=["io.druid.extensions:druid-kafka-seven:0.6.81"] +druid.extensions.coordinates=["io.druid.extensions:druid-kafka-seven:0.6.83"] druid.zk.service.host=localhost @@ -76,7 +76,7 @@ druid.host=#{IP_ADDR}:8080 druid.port=8080 druid.service=druid/prod/realtime -druid.extensions.coordinates=["io.druid.extensions:druid-s3-extensions:0.6.81","io.druid.extensions:druid-kafka-seven:0.6.81"] +druid.extensions.coordinates=["io.druid.extensions:druid-s3-extensions:0.6.83","io.druid.extensions:druid-kafka-seven:0.6.83"] druid.zk.service.host=#{ZK_IPs} druid.zk.paths.base=/druid/prod diff --git a/docs/content/Tutorial:-A-First-Look-at-Druid.md b/docs/content/Tutorial:-A-First-Look-at-Druid.md index 468e78c310c..8f5a2ce5dd1 100644 --- a/docs/content/Tutorial:-A-First-Look-at-Druid.md +++ b/docs/content/Tutorial:-A-First-Look-at-Druid.md @@ -49,7 +49,7 @@ There are two ways to setup Druid: download a tarball, or [Build From Source](Bu ### Download a Tarball -We've built a tarball that contains everything you'll need. You'll find it [here](http://static.druid.io/artifacts/releases/druid-services-0.6.81-bin.tar.gz). Download this file to a directory of your choosing. +We've built a tarball that contains everything you'll need. You'll find it [here](http://static.druid.io/artifacts/releases/druid-services-0.6.83-bin.tar.gz). Download this file to a directory of your choosing. You can extract the awesomeness within by issuing: @@ -60,7 +60,7 @@ tar -zxvf druid-services-*-bin.tar.gz Not too lost so far right? That's great! 
If you cd into the directory: ``` -cd druid-services-0.6.81 +cd druid-services-0.6.83 ``` You should see a bunch of files: diff --git a/docs/content/Tutorial:-The-Druid-Cluster.md b/docs/content/Tutorial:-The-Druid-Cluster.md index 215c2d83207..2a8e2245f37 100644 --- a/docs/content/Tutorial:-The-Druid-Cluster.md +++ b/docs/content/Tutorial:-The-Druid-Cluster.md @@ -13,7 +13,7 @@ In this tutorial, we will set up other types of Druid nodes and external depende If you followed the first tutorial, you should already have Druid downloaded. If not, let's go back and do that first. -You can download the latest version of druid [here](http://static.druid.io/artifacts/releases/druid-services-0.6.81-bin.tar.gz) +You can download the latest version of druid [here](http://static.druid.io/artifacts/releases/druid-services-0.6.83-bin.tar.gz) and untar the contents within by issuing: @@ -149,7 +149,7 @@ druid.port=8081 druid.zk.service.host=localhost -druid.extensions.coordinates=["io.druid.extensions:druid-s3-extensions:0.6.81"] +druid.extensions.coordinates=["io.druid.extensions:druid-s3-extensions:0.6.83"] # Dummy read only AWS account (used to download example data) druid.s3.secretKey=QyyfVZ7llSiRg6Qcrql1eEUG7buFpAK6T6engr1b @@ -240,7 +240,7 @@ druid.port=8083 druid.zk.service.host=localhost -druid.extensions.coordinates=["io.druid.extensions:druid-examples:0.6.81","io.druid.extensions:druid-kafka-seven:0.6.81"] +druid.extensions.coordinates=["io.druid.extensions:druid-examples:0.6.83","io.druid.extensions:druid-kafka-seven:0.6.83"] # Change this config to db to hand off to the rest of the Druid cluster druid.publish.type=noop diff --git a/docs/content/Tutorial:-Webstream.md b/docs/content/Tutorial:-Webstream.md index cd2bfb5eb9a..78f3837bbdb 100644 --- a/docs/content/Tutorial:-Webstream.md +++ b/docs/content/Tutorial:-Webstream.md @@ -37,7 +37,7 @@ There are two ways to setup Druid: download a tarball, or [Build From Source](Bu h3. Download a Tarball -We've built a tarball that contains everything you'll need. You'll find it [here](http://static.druid.io/artifacts/releases/druid-services-0.6.81-bin.tar.gz) +We've built a tarball that contains everything you'll need. You'll find it [here](http://static.druid.io/artifacts/releases/druid-services-0.6.83-bin.tar.gz) Download this file to a directory of your choosing. You can extract the awesomeness within by issuing: @@ -48,7 +48,7 @@ tar zxvf druid-services-*-bin.tar.gz Not too lost so far right? That's great! If you cd into the directory: ``` -cd druid-services-0.6.81 +cd druid-services-0.6.83 ``` You should see a bunch of files: diff --git a/docs/content/Twitter-Tutorial.textile b/docs/content/Twitter-Tutorial.textile index 6decc746b17..af9f4511518 100644 --- a/docs/content/Twitter-Tutorial.textile +++ b/docs/content/Twitter-Tutorial.textile @@ -9,7 +9,7 @@ There are two ways to setup Druid: download a tarball, or build it from source. h3. Download a Tarball -We've built a tarball that contains everything you'll need. You'll find it "here":http://static.druid.io/artifacts/releases/druid-services-0.6.81-bin.tar.gz. +We've built a tarball that contains everything you'll need. You'll find it "here":http://static.druid.io/artifacts/releases/druid-services-0.6.83-bin.tar.gz. Download this bad boy to a directory of your choosing. 
You can extract the awesomeness within by issuing: diff --git a/examples/config/historical/runtime.properties b/examples/config/historical/runtime.properties index 1dffb2cf8ff..402ef01f5b4 100644 --- a/examples/config/historical/runtime.properties +++ b/examples/config/historical/runtime.properties @@ -4,7 +4,7 @@ druid.port=8081 druid.zk.service.host=localhost -druid.extensions.coordinates=["io.druid.extensions:druid-s3-extensions:0.6.81"] +druid.extensions.coordinates=["io.druid.extensions:druid-s3-extensions:0.6.83"] # Dummy read only AWS account (used to download example data) druid.s3.secretKey=QyyfVZ7llSiRg6Qcrql1eEUG7buFpAK6T6engr1b diff --git a/examples/config/realtime/runtime.properties b/examples/config/realtime/runtime.properties index 94ec5bafd19..13dbbe2945c 100644 --- a/examples/config/realtime/runtime.properties +++ b/examples/config/realtime/runtime.properties @@ -4,7 +4,7 @@ druid.port=8083 druid.zk.service.host=localhost -druid.extensions.coordinates=["io.druid.extensions:druid-examples:0.6.81","io.druid.extensions:druid-kafka-seven:0.6.81","io.druid.extensions:druid-rabbitmq:0.6.81"] +druid.extensions.coordinates=["io.druid.extensions:druid-examples:0.6.83","io.druid.extensions:druid-kafka-seven:0.6.83","io.druid.extensions:druid-rabbitmq:0.6.83"] # Change this config to db to hand off to the rest of the Druid cluster druid.publish.type=noop diff --git a/pom.xml b/pom.xml index 2ae2afb18ea..c420c2298d8 100644 --- a/pom.xml +++ b/pom.xml @@ -30,7 +30,7 @@ scm:git:ssh://git@github.com/metamx/druid.git scm:git:ssh://git@github.com/metamx/druid.git http://www.github.com/metamx/druid - druid-0.6.81-SNAPSHOT + druid-0.6.83-SNAPSHOT diff --git a/services/src/main/java/io/druid/cli/CliBroker.java b/services/src/main/java/io/druid/cli/CliBroker.java index 88582af0947..5d1a4b7a105 100644 --- a/services/src/main/java/io/druid/cli/CliBroker.java +++ b/services/src/main/java/io/druid/cli/CliBroker.java @@ -55,7 +55,7 @@ import java.util.List; */ @Command( name = "broker", - description = "Runs a broker node, see http://druid.io/docs/0.6.81/Broker.html for a description" + description = "Runs a broker node, see http://druid.io/docs/0.6.83/Broker.html for a description" ) public class CliBroker extends ServerRunnable { diff --git a/services/src/main/java/io/druid/cli/CliCoordinator.java b/services/src/main/java/io/druid/cli/CliCoordinator.java index 2b2b17ec0fd..8b971d1c818 100644 --- a/services/src/main/java/io/druid/cli/CliCoordinator.java +++ b/services/src/main/java/io/druid/cli/CliCoordinator.java @@ -66,7 +66,7 @@ import java.util.List; */ @Command( name = "coordinator", - description = "Runs the Coordinator, see http://druid.io/docs/0.6.81/Coordinator.html for a description." + description = "Runs the Coordinator, see http://druid.io/docs/0.6.83/Coordinator.html for a description." ) public class CliCoordinator extends ServerRunnable { diff --git a/services/src/main/java/io/druid/cli/CliHadoopIndexer.java b/services/src/main/java/io/druid/cli/CliHadoopIndexer.java index 838d001a714..291ac16796d 100644 --- a/services/src/main/java/io/druid/cli/CliHadoopIndexer.java +++ b/services/src/main/java/io/druid/cli/CliHadoopIndexer.java @@ -41,7 +41,7 @@ import java.util.List; */ @Command( name = "hadoop", - description = "Runs the batch Hadoop Druid Indexer, see http://druid.io/docs/0.6.81/Batch-ingestion.html for a description." + description = "Runs the batch Hadoop Druid Indexer, see http://druid.io/docs/0.6.83/Batch-ingestion.html for a description." 
) public class CliHadoopIndexer implements Runnable { diff --git a/services/src/main/java/io/druid/cli/CliHistorical.java b/services/src/main/java/io/druid/cli/CliHistorical.java index 3a41e35e22c..cc6f7cb68dd 100644 --- a/services/src/main/java/io/druid/cli/CliHistorical.java +++ b/services/src/main/java/io/druid/cli/CliHistorical.java @@ -46,7 +46,7 @@ import java.util.List; */ @Command( name = "historical", - description = "Runs a Historical node, see http://druid.io/docs/0.6.81/Historical.html for a description" + description = "Runs a Historical node, see http://druid.io/docs/0.6.83/Historical.html for a description" ) public class CliHistorical extends ServerRunnable { diff --git a/services/src/main/java/io/druid/cli/CliOverlord.java b/services/src/main/java/io/druid/cli/CliOverlord.java index e7208224033..c22f17f730f 100644 --- a/services/src/main/java/io/druid/cli/CliOverlord.java +++ b/services/src/main/java/io/druid/cli/CliOverlord.java @@ -93,7 +93,7 @@ import java.util.List; */ @Command( name = "overlord", - description = "Runs an Overlord node, see http://druid.io/docs/0.6.81/Indexing-Service.html for a description" + description = "Runs an Overlord node, see http://druid.io/docs/0.6.83/Indexing-Service.html for a description" ) public class CliOverlord extends ServerRunnable { diff --git a/services/src/main/java/io/druid/cli/CliRealtime.java b/services/src/main/java/io/druid/cli/CliRealtime.java index fcf55932d49..09a481980b5 100644 --- a/services/src/main/java/io/druid/cli/CliRealtime.java +++ b/services/src/main/java/io/druid/cli/CliRealtime.java @@ -30,7 +30,7 @@ import java.util.List; */ @Command( name = "realtime", - description = "Runs a realtime node, see http://druid.io/docs/0.6.81/Realtime.html for a description" + description = "Runs a realtime node, see http://druid.io/docs/0.6.83/Realtime.html for a description" ) public class CliRealtime extends ServerRunnable { From f612b7cc64e9d15b6d7c5c49b594e600f9a11a2f Mon Sep 17 00:00:00 2001 From: fjy Date: Mon, 7 Apr 2014 12:45:20 -0700 Subject: [PATCH 16/22] [maven-release-plugin] prepare release druid-0.6.83 --- cassandra-storage/pom.xml | 2 +- common/pom.xml | 2 +- examples/pom.xml | 2 +- hdfs-storage/pom.xml | 2 +- hll/pom.xml | 2 +- indexing-hadoop/pom.xml | 2 +- indexing-service/pom.xml | 2 +- kafka-eight/pom.xml | 2 +- kafka-seven/pom.xml | 2 +- pom.xml | 4 ++-- processing/pom.xml | 2 +- rabbitmq/pom.xml | 2 +- s3-extensions/pom.xml | 2 +- server/pom.xml | 2 +- services/pom.xml | 2 +- 15 files changed, 16 insertions(+), 16 deletions(-) diff --git a/cassandra-storage/pom.xml b/cassandra-storage/pom.xml index e3fc69954a9..51408495bca 100644 --- a/cassandra-storage/pom.xml +++ b/cassandra-storage/pom.xml @@ -28,7 +28,7 @@ io.druid druid - 0.6.83-SNAPSHOT + 0.6.83 diff --git a/common/pom.xml b/common/pom.xml index c72acf6922a..73364c3b7ea 100644 --- a/common/pom.xml +++ b/common/pom.xml @@ -28,7 +28,7 @@ io.druid druid - 0.6.83-SNAPSHOT + 0.6.83 diff --git a/examples/pom.xml b/examples/pom.xml index 26b2d2d8b46..25a70cff26c 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -28,7 +28,7 @@ io.druid druid - 0.6.83-SNAPSHOT + 0.6.83 diff --git a/hdfs-storage/pom.xml b/hdfs-storage/pom.xml index 6e9919a3b1f..85a2dd21720 100644 --- a/hdfs-storage/pom.xml +++ b/hdfs-storage/pom.xml @@ -28,7 +28,7 @@ io.druid druid - 0.6.83-SNAPSHOT + 0.6.83 diff --git a/hll/pom.xml b/hll/pom.xml index 2e0c36b5ca6..708d7e17d7d 100644 --- a/hll/pom.xml +++ b/hll/pom.xml @@ -28,7 +28,7 @@ io.druid druid - 0.6.83-SNAPSHOT + 0.6.83 diff 
--git a/indexing-hadoop/pom.xml b/indexing-hadoop/pom.xml index 19af0cb921c..88db5fd53f1 100644 --- a/indexing-hadoop/pom.xml +++ b/indexing-hadoop/pom.xml @@ -28,7 +28,7 @@ io.druid druid - 0.6.83-SNAPSHOT + 0.6.83 diff --git a/indexing-service/pom.xml b/indexing-service/pom.xml index 83c456fcbfa..8da02225392 100644 --- a/indexing-service/pom.xml +++ b/indexing-service/pom.xml @@ -28,7 +28,7 @@ io.druid druid - 0.6.83-SNAPSHOT + 0.6.83 diff --git a/kafka-eight/pom.xml b/kafka-eight/pom.xml index 1c1df24d020..46031b9750c 100644 --- a/kafka-eight/pom.xml +++ b/kafka-eight/pom.xml @@ -28,7 +28,7 @@ io.druid druid - 0.6.83-SNAPSHOT + 0.6.83 diff --git a/kafka-seven/pom.xml b/kafka-seven/pom.xml index 96594460983..7787d2670cb 100644 --- a/kafka-seven/pom.xml +++ b/kafka-seven/pom.xml @@ -28,7 +28,7 @@ io.druid druid - 0.6.83-SNAPSHOT + 0.6.83 diff --git a/pom.xml b/pom.xml index c420c2298d8..057c05bee02 100644 --- a/pom.xml +++ b/pom.xml @@ -23,14 +23,14 @@ io.druid druid pom - 0.6.83-SNAPSHOT + 0.6.83 druid druid scm:git:ssh://git@github.com/metamx/druid.git scm:git:ssh://git@github.com/metamx/druid.git http://www.github.com/metamx/druid - druid-0.6.83-SNAPSHOT + druid-0.6.83 diff --git a/processing/pom.xml b/processing/pom.xml index 4169ea3be38..2d158bc5bde 100644 --- a/processing/pom.xml +++ b/processing/pom.xml @@ -28,7 +28,7 @@ io.druid druid - 0.6.83-SNAPSHOT + 0.6.83 diff --git a/rabbitmq/pom.xml b/rabbitmq/pom.xml index 66d37cf381c..fcaf166fd54 100644 --- a/rabbitmq/pom.xml +++ b/rabbitmq/pom.xml @@ -9,7 +9,7 @@ io.druid druid - 0.6.83-SNAPSHOT + 0.6.83 diff --git a/s3-extensions/pom.xml b/s3-extensions/pom.xml index 06f414ecc85..8351a6215f7 100644 --- a/s3-extensions/pom.xml +++ b/s3-extensions/pom.xml @@ -28,7 +28,7 @@ io.druid druid - 0.6.83-SNAPSHOT + 0.6.83 diff --git a/server/pom.xml b/server/pom.xml index 34c9244f0ef..c09fb40201b 100644 --- a/server/pom.xml +++ b/server/pom.xml @@ -28,7 +28,7 @@ io.druid druid - 0.6.83-SNAPSHOT + 0.6.83 diff --git a/services/pom.xml b/services/pom.xml index 2029aed81fe..d718572bd77 100644 --- a/services/pom.xml +++ b/services/pom.xml @@ -27,7 +27,7 @@ io.druid druid - 0.6.83-SNAPSHOT + 0.6.83 From bfb66691b9954a9c9408a44117477b5938869462 Mon Sep 17 00:00:00 2001 From: fjy Date: Mon, 7 Apr 2014 12:45:28 -0700 Subject: [PATCH 17/22] [maven-release-plugin] prepare for next development iteration --- cassandra-storage/pom.xml | 2 +- common/pom.xml | 2 +- examples/pom.xml | 2 +- hdfs-storage/pom.xml | 2 +- hll/pom.xml | 2 +- indexing-hadoop/pom.xml | 2 +- indexing-service/pom.xml | 2 +- kafka-eight/pom.xml | 2 +- kafka-seven/pom.xml | 2 +- pom.xml | 4 ++-- processing/pom.xml | 2 +- rabbitmq/pom.xml | 2 +- s3-extensions/pom.xml | 2 +- server/pom.xml | 2 +- services/pom.xml | 2 +- 15 files changed, 16 insertions(+), 16 deletions(-) diff --git a/cassandra-storage/pom.xml b/cassandra-storage/pom.xml index 51408495bca..9e7e23e0337 100644 --- a/cassandra-storage/pom.xml +++ b/cassandra-storage/pom.xml @@ -28,7 +28,7 @@ io.druid druid - 0.6.83 + 0.6.84-SNAPSHOT diff --git a/common/pom.xml b/common/pom.xml index 73364c3b7ea..ca5cdce59a4 100644 --- a/common/pom.xml +++ b/common/pom.xml @@ -28,7 +28,7 @@ io.druid druid - 0.6.83 + 0.6.84-SNAPSHOT diff --git a/examples/pom.xml b/examples/pom.xml index 25a70cff26c..35f581223f3 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -28,7 +28,7 @@ io.druid druid - 0.6.83 + 0.6.84-SNAPSHOT diff --git a/hdfs-storage/pom.xml b/hdfs-storage/pom.xml index 85a2dd21720..3f7499a3dc7 100644 --- a/hdfs-storage/pom.xml +++ 
b/hdfs-storage/pom.xml @@ -28,7 +28,7 @@ io.druid druid - 0.6.83 + 0.6.84-SNAPSHOT diff --git a/hll/pom.xml b/hll/pom.xml index 708d7e17d7d..d7ecd1e39c5 100644 --- a/hll/pom.xml +++ b/hll/pom.xml @@ -28,7 +28,7 @@ io.druid druid - 0.6.83 + 0.6.84-SNAPSHOT diff --git a/indexing-hadoop/pom.xml b/indexing-hadoop/pom.xml index 88db5fd53f1..1002cd433cc 100644 --- a/indexing-hadoop/pom.xml +++ b/indexing-hadoop/pom.xml @@ -28,7 +28,7 @@ io.druid druid - 0.6.83 + 0.6.84-SNAPSHOT diff --git a/indexing-service/pom.xml b/indexing-service/pom.xml index 8da02225392..f2303ae20d7 100644 --- a/indexing-service/pom.xml +++ b/indexing-service/pom.xml @@ -28,7 +28,7 @@ io.druid druid - 0.6.83 + 0.6.84-SNAPSHOT diff --git a/kafka-eight/pom.xml b/kafka-eight/pom.xml index 46031b9750c..915e09a2ca6 100644 --- a/kafka-eight/pom.xml +++ b/kafka-eight/pom.xml @@ -28,7 +28,7 @@ io.druid druid - 0.6.83 + 0.6.84-SNAPSHOT diff --git a/kafka-seven/pom.xml b/kafka-seven/pom.xml index 7787d2670cb..a4a60933d3a 100644 --- a/kafka-seven/pom.xml +++ b/kafka-seven/pom.xml @@ -28,7 +28,7 @@ io.druid druid - 0.6.83 + 0.6.84-SNAPSHOT diff --git a/pom.xml b/pom.xml index 057c05bee02..627dbb7277b 100644 --- a/pom.xml +++ b/pom.xml @@ -23,14 +23,14 @@ io.druid druid pom - 0.6.83 + 0.6.84-SNAPSHOT druid druid scm:git:ssh://git@github.com/metamx/druid.git scm:git:ssh://git@github.com/metamx/druid.git http://www.github.com/metamx/druid - druid-0.6.83 + druid-0.6.83-SNAPSHOT diff --git a/processing/pom.xml b/processing/pom.xml index 2d158bc5bde..467b59f534d 100644 --- a/processing/pom.xml +++ b/processing/pom.xml @@ -28,7 +28,7 @@ io.druid druid - 0.6.83 + 0.6.84-SNAPSHOT diff --git a/rabbitmq/pom.xml b/rabbitmq/pom.xml index fcaf166fd54..0506e244863 100644 --- a/rabbitmq/pom.xml +++ b/rabbitmq/pom.xml @@ -9,7 +9,7 @@ io.druid druid - 0.6.83 + 0.6.84-SNAPSHOT diff --git a/s3-extensions/pom.xml b/s3-extensions/pom.xml index 8351a6215f7..b6a731f9607 100644 --- a/s3-extensions/pom.xml +++ b/s3-extensions/pom.xml @@ -28,7 +28,7 @@ io.druid druid - 0.6.83 + 0.6.84-SNAPSHOT diff --git a/server/pom.xml b/server/pom.xml index c09fb40201b..3c67f6743c9 100644 --- a/server/pom.xml +++ b/server/pom.xml @@ -28,7 +28,7 @@ io.druid druid - 0.6.83 + 0.6.84-SNAPSHOT diff --git a/services/pom.xml b/services/pom.xml index d718572bd77..3de86d664ad 100644 --- a/services/pom.xml +++ b/services/pom.xml @@ -27,7 +27,7 @@ io.druid druid - 0.6.83 + 0.6.84-SNAPSHOT From bae0fdf936f54728409667d69fe5be4938da0826 Mon Sep 17 00:00:00 2001 From: fjy Date: Mon, 7 Apr 2014 13:18:55 -0700 Subject: [PATCH 18/22] add context serde tests and deprecate backwards compatible methods --- .../src/main/java/io/druid/query/Query.java | 10 +-- .../timeboundary/TimeBoundaryQueryTest.java | 80 ++++++++++++++++++- 2 files changed, 83 insertions(+), 7 deletions(-) diff --git a/processing/src/main/java/io/druid/query/Query.java b/processing/src/main/java/io/druid/query/Query.java index 2de75e57455..9b9c9e373f9 100644 --- a/processing/src/main/java/io/druid/query/Query.java +++ b/processing/src/main/java/io/druid/query/Query.java @@ -75,11 +75,11 @@ public interface Query<T> public <ContextType> ContextType getContextValue(String key, ContextType defaultValue); // For backwards compatibility - public int getContextPriority(int defaultValue); - public boolean getContextBySegment(boolean defaultValue); - public boolean getContextPopulateCache(boolean defaultValue); - public boolean getContextUseCache(boolean defaultValue); - public boolean getContextFinalize(boolean defaultValue); + @Deprecated public int
getContextPriority(int defaultValue); + @Deprecated public boolean getContextBySegment(boolean defaultValue); + @Deprecated public boolean getContextPopulateCache(boolean defaultValue); + @Deprecated public boolean getContextUseCache(boolean defaultValue); + @Deprecated public boolean getContextFinalize(boolean defaultValue); public Query withOverriddenContext(Map contextOverride); diff --git a/processing/src/test/java/io/druid/query/timeboundary/TimeBoundaryQueryTest.java b/processing/src/test/java/io/druid/query/timeboundary/TimeBoundaryQueryTest.java index 1dd50e9493d..b88f290984b 100644 --- a/processing/src/test/java/io/druid/query/timeboundary/TimeBoundaryQueryTest.java +++ b/processing/src/test/java/io/druid/query/timeboundary/TimeBoundaryQueryTest.java @@ -22,6 +22,7 @@ package io.druid.query.timeboundary; import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.collect.ImmutableMap; import io.druid.jackson.DefaultObjectMapper; import io.druid.query.Druids; import io.druid.query.Query; @@ -38,8 +39,8 @@ public class TimeBoundaryQueryTest public void testQuerySerialization() throws IOException { Query query = Druids.newTimeBoundaryQueryBuilder() - .dataSource("testing") - .build(); + .dataSource("testing") + .build(); String json = jsonMapper.writeValueAsString(query); Query serdeQuery = jsonMapper.readValue(json, Query.class); @@ -47,4 +48,79 @@ public class TimeBoundaryQueryTest Assert.assertEquals(query, serdeQuery); } + @Test + public void testContextSerde() throws Exception + { + final TimeBoundaryQuery query = Druids.newTimeBoundaryQueryBuilder() + .dataSource("foo") + .intervals("2013/2014") + .context( + ImmutableMap.of( + "priority", + 1, + "useCache", + true, + "populateCache", + true, + "finalize", + true + ) + ).build(); + + final ObjectMapper mapper = new DefaultObjectMapper(); + + final TimeBoundaryQuery serdeQuery = mapper.readValue( + mapper.writeValueAsBytes( + mapper.readValue( + mapper.writeValueAsString( + query + ), TimeBoundaryQuery.class + ) + ), TimeBoundaryQuery.class + ); + + + Assert.assertEquals(1, serdeQuery.getContextValue("priority")); + Assert.assertEquals(true, serdeQuery.getContextValue("useCache")); + Assert.assertEquals(true, serdeQuery.getContextValue("populateCache")); + Assert.assertEquals(true, serdeQuery.getContextValue("finalize")); + } + + @Test + public void testContextSerde2() throws Exception + { + final TimeBoundaryQuery query = Druids.newTimeBoundaryQueryBuilder() + .dataSource("foo") + .intervals("2013/2014") + .context( + ImmutableMap.of( + "priority", + "1", + "useCache", + "true", + "populateCache", + "true", + "finalize", + "true" + ) + ).build(); + + final ObjectMapper mapper = new DefaultObjectMapper(); + + final TimeBoundaryQuery serdeQuery = mapper.readValue( + mapper.writeValueAsBytes( + mapper.readValue( + mapper.writeValueAsString( + query + ), TimeBoundaryQuery.class + ) + ), TimeBoundaryQuery.class + ); + + + Assert.assertEquals("1", serdeQuery.getContextValue("priority")); + Assert.assertEquals("true", serdeQuery.getContextValue("useCache")); + Assert.assertEquals("true", serdeQuery.getContextValue("populateCache")); + Assert.assertEquals("true", serdeQuery.getContextValue("finalize")); + } } From 6a26f7684fd0e59ceaadc1b83c85fd8b8da46dd7 Mon Sep 17 00:00:00 2001 From: fjy Date: Mon, 7 Apr 2014 14:36:13 -0700 Subject: [PATCH 19/22] fix context on CQE --- .../main/java/io/druid/query/ChainedExecutionQueryRunner.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/processing/src/main/java/io/druid/query/ChainedExecutionQueryRunner.java b/processing/src/main/java/io/druid/query/ChainedExecutionQueryRunner.java index 3e3e6b03243..776276415a2 100644 --- a/processing/src/main/java/io/druid/query/ChainedExecutionQueryRunner.java +++ b/processing/src/main/java/io/druid/query/ChainedExecutionQueryRunner.java @@ -83,7 +83,7 @@ public class ChainedExecutionQueryRunner<T> implements QueryRunner<T> @Override public Sequence<T> run(final Query<T> query) { - final int priority = query.getContextValue("priority", 0); + final int priority = query.getContextPriority(0); return new BaseSequence<T, Iterator<T>>( new BaseSequence.IteratorMaker<T, Iterator<T>>() From 4ef2bbbfc1df702e47ac3409cbf7d64b002d63c7 Mon Sep 17 00:00:00 2001 From: fjy Date: Mon, 7 Apr 2014 14:37:02 -0700 Subject: [PATCH 20/22] prepare for next release --- build.sh | 2 +- docs/content/Examples.md | 4 ++-- docs/content/Indexing-Service-Config.md | 4 ++-- docs/content/Realtime-Config.md | 4 ++-- docs/content/Tutorial:-A-First-Look-at-Druid.md | 4 ++-- docs/content/Tutorial:-The-Druid-Cluster.md | 6 +++--- docs/content/Tutorial:-Webstream.md | 4 ++-- docs/content/Twitter-Tutorial.textile | 2 +- examples/config/historical/runtime.properties | 2 +- examples/config/realtime/runtime.properties | 2 +- pom.xml | 2 +- services/src/main/java/io/druid/cli/CliBroker.java | 2 +- services/src/main/java/io/druid/cli/CliCoordinator.java | 2 +- services/src/main/java/io/druid/cli/CliHadoopIndexer.java | 2 +- services/src/main/java/io/druid/cli/CliHistorical.java | 2 +- services/src/main/java/io/druid/cli/CliOverlord.java | 2 +- services/src/main/java/io/druid/cli/CliRealtime.java | 2 +- 17 files changed, 24 insertions(+), 24 deletions(-) diff --git a/build.sh b/build.sh index be57dcb3b62..e97f8f2b96c 100755 --- a/build.sh +++ b/build.sh @@ -30,4 +30,4 @@ echo "For examples, see: " echo " " ls -1 examples/*/*sh echo " " -echo "See also http://druid.io/docs/0.6.83" +echo "See also http://druid.io/docs/0.6.84" diff --git a/docs/content/Examples.md b/docs/content/Examples.md index 8b43f353eb8..ad7ecace99e 100644 --- a/docs/content/Examples.md +++ b/docs/content/Examples.md @@ -19,13 +19,13 @@ Clone Druid and build it: git clone https://github.com/metamx/druid.git druid cd druid git fetch --tags -git checkout druid-0.6.83 +git checkout druid-0.6.84 ./build.sh ``` ### Downloading the DSK (Druid Standalone Kit) -[Download](http://static.druid.io/artifacts/releases/druid-services-0.6.83-bin.tar.gz) a stand-alone tarball and run it: +[Download](http://static.druid.io/artifacts/releases/druid-services-0.6.84-bin.tar.gz) a stand-alone tarball and run it: ``` bash tar -xzf druid-services-0.X.X-bin.tar.gz diff --git a/docs/content/Indexing-Service-Config.md b/docs/content/Indexing-Service-Config.md index 366dd3b85f4..b1a10b0c444 100644 --- a/docs/content/Indexing-Service-Config.md +++ b/docs/content/Indexing-Service-Config.md @@ -66,7 +66,7 @@ druid.host=#{IP_ADDR}:8080 druid.port=8080 druid.service=druid/prod/indexer -druid.extensions.coordinates=["io.druid.extensions:druid-s3-extensions:0.6.83"] +druid.extensions.coordinates=["io.druid.extensions:druid-s3-extensions:0.6.84"] druid.zk.service.host=#{ZK_IPs} druid.zk.paths.base=/druid/prod @@ -115,7 +115,7 @@ druid.host=#{IP_ADDR}:8080 druid.port=8080 druid.service=druid/prod/worker -druid.extensions.coordinates=["io.druid.extensions:druid-s3-extensions:0.6.83","io.druid.extensions:druid-kafka-seven:0.6.83"]
+druid.extensions.coordinates=["io.druid.extensions:druid-s3-extensions:0.6.84","io.druid.extensions:druid-kafka-seven:0.6.84"] druid.zk.service.host=#{ZK_IPs} druid.zk.paths.base=/druid/prod diff --git a/docs/content/Realtime-Config.md b/docs/content/Realtime-Config.md index 9c1ce7c9bed..5854aac4e06 100644 --- a/docs/content/Realtime-Config.md +++ b/docs/content/Realtime-Config.md @@ -27,7 +27,7 @@ druid.host=localhost druid.service=realtime druid.port=8083 -druid.extensions.coordinates=["io.druid.extensions:druid-kafka-seven:0.6.83"] +druid.extensions.coordinates=["io.druid.extensions:druid-kafka-seven:0.6.84"] druid.zk.service.host=localhost @@ -76,7 +76,7 @@ druid.host=#{IP_ADDR}:8080 druid.port=8080 druid.service=druid/prod/realtime -druid.extensions.coordinates=["io.druid.extensions:druid-s3-extensions:0.6.83","io.druid.extensions:druid-kafka-seven:0.6.83"] +druid.extensions.coordinates=["io.druid.extensions:druid-s3-extensions:0.6.84","io.druid.extensions:druid-kafka-seven:0.6.84"] druid.zk.service.host=#{ZK_IPs} druid.zk.paths.base=/druid/prod diff --git a/docs/content/Tutorial:-A-First-Look-at-Druid.md b/docs/content/Tutorial:-A-First-Look-at-Druid.md index 8f5a2ce5dd1..9f0800fe9d8 100644 --- a/docs/content/Tutorial:-A-First-Look-at-Druid.md +++ b/docs/content/Tutorial:-A-First-Look-at-Druid.md @@ -49,7 +49,7 @@ There are two ways to setup Druid: download a tarball, or [Build From Source](Bu ### Download a Tarball -We've built a tarball that contains everything you'll need. You'll find it [here](http://static.druid.io/artifacts/releases/druid-services-0.6.83-bin.tar.gz). Download this file to a directory of your choosing. +We've built a tarball that contains everything you'll need. You'll find it [here](http://static.druid.io/artifacts/releases/druid-services-0.6.84-bin.tar.gz). Download this file to a directory of your choosing. You can extract the awesomeness within by issuing: @@ -60,7 +60,7 @@ tar -zxvf druid-services-*-bin.tar.gz Not too lost so far right? That's great! If you cd into the directory: ``` -cd druid-services-0.6.83 +cd druid-services-0.6.84 ``` You should see a bunch of files: diff --git a/docs/content/Tutorial:-The-Druid-Cluster.md b/docs/content/Tutorial:-The-Druid-Cluster.md index 2a8e2245f37..b72a3050e40 100644 --- a/docs/content/Tutorial:-The-Druid-Cluster.md +++ b/docs/content/Tutorial:-The-Druid-Cluster.md @@ -13,7 +13,7 @@ In this tutorial, we will set up other types of Druid nodes and external depende If you followed the first tutorial, you should already have Druid downloaded. If not, let's go back and do that first. 
-You can download the latest version of druid [here](http://static.druid.io/artifacts/releases/druid-services-0.6.83-bin.tar.gz) +You can download the latest version of druid [here](http://static.druid.io/artifacts/releases/druid-services-0.6.84-bin.tar.gz) and untar the contents within by issuing: @@ -149,7 +149,7 @@ druid.port=8081 druid.zk.service.host=localhost -druid.extensions.coordinates=["io.druid.extensions:druid-s3-extensions:0.6.83"] +druid.extensions.coordinates=["io.druid.extensions:druid-s3-extensions:0.6.84"] # Dummy read only AWS account (used to download example data) druid.s3.secretKey=QyyfVZ7llSiRg6Qcrql1eEUG7buFpAK6T6engr1b @@ -240,7 +240,7 @@ druid.port=8083 druid.zk.service.host=localhost -druid.extensions.coordinates=["io.druid.extensions:druid-examples:0.6.83","io.druid.extensions:druid-kafka-seven:0.6.83"] +druid.extensions.coordinates=["io.druid.extensions:druid-examples:0.6.84","io.druid.extensions:druid-kafka-seven:0.6.84"] # Change this config to db to hand off to the rest of the Druid cluster druid.publish.type=noop diff --git a/docs/content/Tutorial:-Webstream.md b/docs/content/Tutorial:-Webstream.md index 78f3837bbdb..54b40410d61 100644 --- a/docs/content/Tutorial:-Webstream.md +++ b/docs/content/Tutorial:-Webstream.md @@ -37,7 +37,7 @@ There are two ways to setup Druid: download a tarball, or [Build From Source](Bu h3. Download a Tarball -We've built a tarball that contains everything you'll need. You'll find it [here](http://static.druid.io/artifacts/releases/druid-services-0.6.83-bin.tar.gz) +We've built a tarball that contains everything you'll need. You'll find it [here](http://static.druid.io/artifacts/releases/druid-services-0.6.84-bin.tar.gz) Download this file to a directory of your choosing. You can extract the awesomeness within by issuing: @@ -48,7 +48,7 @@ tar zxvf druid-services-*-bin.tar.gz Not too lost so far right? That's great! If you cd into the directory: ``` -cd druid-services-0.6.83 +cd druid-services-0.6.84 ``` You should see a bunch of files: diff --git a/docs/content/Twitter-Tutorial.textile b/docs/content/Twitter-Tutorial.textile index af9f4511518..64580a74cf7 100644 --- a/docs/content/Twitter-Tutorial.textile +++ b/docs/content/Twitter-Tutorial.textile @@ -9,7 +9,7 @@ There are two ways to setup Druid: download a tarball, or build it from source. h3. Download a Tarball -We've built a tarball that contains everything you'll need. You'll find it "here":http://static.druid.io/artifacts/releases/druid-services-0.6.83-bin.tar.gz. +We've built a tarball that contains everything you'll need. You'll find it "here":http://static.druid.io/artifacts/releases/druid-services-0.6.84-bin.tar.gz. Download this bad boy to a directory of your choosing. 
You can extract the awesomeness within by issuing: diff --git a/examples/config/historical/runtime.properties b/examples/config/historical/runtime.properties index 402ef01f5b4..ee085149e60 100644 --- a/examples/config/historical/runtime.properties +++ b/examples/config/historical/runtime.properties @@ -4,7 +4,7 @@ druid.port=8081 druid.zk.service.host=localhost -druid.extensions.coordinates=["io.druid.extensions:druid-s3-extensions:0.6.83"] +druid.extensions.coordinates=["io.druid.extensions:druid-s3-extensions:0.6.84"] # Dummy read only AWS account (used to download example data) druid.s3.secretKey=QyyfVZ7llSiRg6Qcrql1eEUG7buFpAK6T6engr1b diff --git a/examples/config/realtime/runtime.properties b/examples/config/realtime/runtime.properties index 13dbbe2945c..571e184f8a1 100644 --- a/examples/config/realtime/runtime.properties +++ b/examples/config/realtime/runtime.properties @@ -4,7 +4,7 @@ druid.port=8083 druid.zk.service.host=localhost -druid.extensions.coordinates=["io.druid.extensions:druid-examples:0.6.83","io.druid.extensions:druid-kafka-seven:0.6.83","io.druid.extensions:druid-rabbitmq:0.6.83"] +druid.extensions.coordinates=["io.druid.extensions:druid-examples:0.6.84","io.druid.extensions:druid-kafka-seven:0.6.84","io.druid.extensions:druid-rabbitmq:0.6.84"] # Change this config to db to hand off to the rest of the Druid cluster druid.publish.type=noop diff --git a/pom.xml b/pom.xml index 627dbb7277b..88bbcb44d48 100644 --- a/pom.xml +++ b/pom.xml @@ -30,7 +30,7 @@ scm:git:ssh://git@github.com/metamx/druid.git scm:git:ssh://git@github.com/metamx/druid.git http://www.github.com/metamx/druid - druid-0.6.83-SNAPSHOT + druid-0.6.84-SNAPSHOT diff --git a/services/src/main/java/io/druid/cli/CliBroker.java b/services/src/main/java/io/druid/cli/CliBroker.java index 5d1a4b7a105..c145a053106 100644 --- a/services/src/main/java/io/druid/cli/CliBroker.java +++ b/services/src/main/java/io/druid/cli/CliBroker.java @@ -55,7 +55,7 @@ import java.util.List; */ @Command( name = "broker", - description = "Runs a broker node, see http://druid.io/docs/0.6.83/Broker.html for a description" + description = "Runs a broker node, see http://druid.io/docs/0.6.84/Broker.html for a description" ) public class CliBroker extends ServerRunnable { diff --git a/services/src/main/java/io/druid/cli/CliCoordinator.java b/services/src/main/java/io/druid/cli/CliCoordinator.java index 8b971d1c818..f04d894f638 100644 --- a/services/src/main/java/io/druid/cli/CliCoordinator.java +++ b/services/src/main/java/io/druid/cli/CliCoordinator.java @@ -66,7 +66,7 @@ import java.util.List; */ @Command( name = "coordinator", - description = "Runs the Coordinator, see http://druid.io/docs/0.6.83/Coordinator.html for a description." + description = "Runs the Coordinator, see http://druid.io/docs/0.6.84/Coordinator.html for a description." ) public class CliCoordinator extends ServerRunnable { diff --git a/services/src/main/java/io/druid/cli/CliHadoopIndexer.java b/services/src/main/java/io/druid/cli/CliHadoopIndexer.java index 291ac16796d..8532215521a 100644 --- a/services/src/main/java/io/druid/cli/CliHadoopIndexer.java +++ b/services/src/main/java/io/druid/cli/CliHadoopIndexer.java @@ -41,7 +41,7 @@ import java.util.List; */ @Command( name = "hadoop", - description = "Runs the batch Hadoop Druid Indexer, see http://druid.io/docs/0.6.83/Batch-ingestion.html for a description." + description = "Runs the batch Hadoop Druid Indexer, see http://druid.io/docs/0.6.84/Batch-ingestion.html for a description." 
) public class CliHadoopIndexer implements Runnable { diff --git a/services/src/main/java/io/druid/cli/CliHistorical.java b/services/src/main/java/io/druid/cli/CliHistorical.java index cc6f7cb68dd..941d31521d5 100644 --- a/services/src/main/java/io/druid/cli/CliHistorical.java +++ b/services/src/main/java/io/druid/cli/CliHistorical.java @@ -46,7 +46,7 @@ import java.util.List; */ @Command( name = "historical", - description = "Runs a Historical node, see http://druid.io/docs/0.6.83/Historical.html for a description" + description = "Runs a Historical node, see http://druid.io/docs/0.6.84/Historical.html for a description" ) public class CliHistorical extends ServerRunnable { diff --git a/services/src/main/java/io/druid/cli/CliOverlord.java b/services/src/main/java/io/druid/cli/CliOverlord.java index c22f17f730f..f2c6221a723 100644 --- a/services/src/main/java/io/druid/cli/CliOverlord.java +++ b/services/src/main/java/io/druid/cli/CliOverlord.java @@ -93,7 +93,7 @@ import java.util.List; */ @Command( name = "overlord", - description = "Runs an Overlord node, see http://druid.io/docs/0.6.83/Indexing-Service.html for a description" + description = "Runs an Overlord node, see http://druid.io/docs/0.6.84/Indexing-Service.html for a description" ) public class CliOverlord extends ServerRunnable { diff --git a/services/src/main/java/io/druid/cli/CliRealtime.java b/services/src/main/java/io/druid/cli/CliRealtime.java index 09a481980b5..012ee37b31c 100644 --- a/services/src/main/java/io/druid/cli/CliRealtime.java +++ b/services/src/main/java/io/druid/cli/CliRealtime.java @@ -30,7 +30,7 @@ import java.util.List; */ @Command( name = "realtime", - description = "Runs a realtime node, see http://druid.io/docs/0.6.83/Realtime.html for a description" + description = "Runs a realtime node, see http://druid.io/docs/0.6.84/Realtime.html for a description" ) public class CliRealtime extends ServerRunnable { From e1e81cfeacd131c401a898ea11a0d0a3768ac44c Mon Sep 17 00:00:00 2001 From: fjy Date: Mon, 7 Apr 2014 14:38:51 -0700 Subject: [PATCH 21/22] [maven-release-plugin] prepare release druid-0.6.84 --- cassandra-storage/pom.xml | 2 +- common/pom.xml | 2 +- examples/pom.xml | 2 +- hdfs-storage/pom.xml | 2 +- hll/pom.xml | 2 +- indexing-hadoop/pom.xml | 2 +- indexing-service/pom.xml | 2 +- kafka-eight/pom.xml | 2 +- kafka-seven/pom.xml | 2 +- pom.xml | 4 ++-- processing/pom.xml | 2 +- rabbitmq/pom.xml | 2 +- s3-extensions/pom.xml | 2 +- server/pom.xml | 2 +- services/pom.xml | 2 +- 15 files changed, 16 insertions(+), 16 deletions(-) diff --git a/cassandra-storage/pom.xml b/cassandra-storage/pom.xml index 9e7e23e0337..92f48380eb0 100644 --- a/cassandra-storage/pom.xml +++ b/cassandra-storage/pom.xml @@ -28,7 +28,7 @@ io.druid druid - 0.6.84-SNAPSHOT + 0.6.84 diff --git a/common/pom.xml b/common/pom.xml index ca5cdce59a4..bed26256eba 100644 --- a/common/pom.xml +++ b/common/pom.xml @@ -28,7 +28,7 @@ io.druid druid - 0.6.84-SNAPSHOT + 0.6.84 diff --git a/examples/pom.xml b/examples/pom.xml index 35f581223f3..d47884cd55b 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -28,7 +28,7 @@ io.druid druid - 0.6.84-SNAPSHOT + 0.6.84 diff --git a/hdfs-storage/pom.xml b/hdfs-storage/pom.xml index 3f7499a3dc7..a71dda5e115 100644 --- a/hdfs-storage/pom.xml +++ b/hdfs-storage/pom.xml @@ -28,7 +28,7 @@ io.druid druid - 0.6.84-SNAPSHOT + 0.6.84 diff --git a/hll/pom.xml b/hll/pom.xml index d7ecd1e39c5..b59cd001a22 100644 --- a/hll/pom.xml +++ b/hll/pom.xml @@ -28,7 +28,7 @@ io.druid druid - 0.6.84-SNAPSHOT + 0.6.84 diff 
--git a/indexing-hadoop/pom.xml b/indexing-hadoop/pom.xml index 1002cd433cc..52c4809adba 100644 --- a/indexing-hadoop/pom.xml +++ b/indexing-hadoop/pom.xml @@ -28,7 +28,7 @@ io.druid druid - 0.6.84-SNAPSHOT + 0.6.84 diff --git a/indexing-service/pom.xml b/indexing-service/pom.xml index f2303ae20d7..55f170cb97a 100644 --- a/indexing-service/pom.xml +++ b/indexing-service/pom.xml @@ -28,7 +28,7 @@ io.druid druid - 0.6.84-SNAPSHOT + 0.6.84 diff --git a/kafka-eight/pom.xml b/kafka-eight/pom.xml index 915e09a2ca6..69be3ddbf28 100644 --- a/kafka-eight/pom.xml +++ b/kafka-eight/pom.xml @@ -28,7 +28,7 @@ io.druid druid - 0.6.84-SNAPSHOT + 0.6.84 diff --git a/kafka-seven/pom.xml b/kafka-seven/pom.xml index a4a60933d3a..10d67f55eef 100644 --- a/kafka-seven/pom.xml +++ b/kafka-seven/pom.xml @@ -28,7 +28,7 @@ io.druid druid - 0.6.84-SNAPSHOT + 0.6.84 diff --git a/pom.xml b/pom.xml index 88bbcb44d48..e15c16d14c0 100644 --- a/pom.xml +++ b/pom.xml @@ -23,14 +23,14 @@ io.druid druid pom - 0.6.84-SNAPSHOT + 0.6.84 druid druid scm:git:ssh://git@github.com/metamx/druid.git scm:git:ssh://git@github.com/metamx/druid.git http://www.github.com/metamx/druid - druid-0.6.84-SNAPSHOT + druid-0.6.84 diff --git a/processing/pom.xml b/processing/pom.xml index 467b59f534d..fab5dea6278 100644 --- a/processing/pom.xml +++ b/processing/pom.xml @@ -28,7 +28,7 @@ io.druid druid - 0.6.84-SNAPSHOT + 0.6.84 diff --git a/rabbitmq/pom.xml b/rabbitmq/pom.xml index 0506e244863..4f89c484aec 100644 --- a/rabbitmq/pom.xml +++ b/rabbitmq/pom.xml @@ -9,7 +9,7 @@ io.druid druid - 0.6.84-SNAPSHOT + 0.6.84 diff --git a/s3-extensions/pom.xml b/s3-extensions/pom.xml index b6a731f9607..d8976b9eabf 100644 --- a/s3-extensions/pom.xml +++ b/s3-extensions/pom.xml @@ -28,7 +28,7 @@ io.druid druid - 0.6.84-SNAPSHOT + 0.6.84 diff --git a/server/pom.xml b/server/pom.xml index 3c67f6743c9..b5531229d7d 100644 --- a/server/pom.xml +++ b/server/pom.xml @@ -28,7 +28,7 @@ io.druid druid - 0.6.84-SNAPSHOT + 0.6.84 diff --git a/services/pom.xml b/services/pom.xml index 3de86d664ad..6fe10ac358b 100644 --- a/services/pom.xml +++ b/services/pom.xml @@ -27,7 +27,7 @@ io.druid druid - 0.6.84-SNAPSHOT + 0.6.84 From 8e3bcc8fc35299dea893d76a319d7bf7dd7d1b01 Mon Sep 17 00:00:00 2001 From: fjy Date: Mon, 7 Apr 2014 14:38:55 -0700 Subject: [PATCH 22/22] [maven-release-plugin] prepare for next development iteration --- cassandra-storage/pom.xml | 2 +- common/pom.xml | 2 +- examples/pom.xml | 2 +- hdfs-storage/pom.xml | 2 +- hll/pom.xml | 2 +- indexing-hadoop/pom.xml | 2 +- indexing-service/pom.xml | 2 +- kafka-eight/pom.xml | 2 +- kafka-seven/pom.xml | 2 +- pom.xml | 4 ++-- processing/pom.xml | 2 +- rabbitmq/pom.xml | 2 +- s3-extensions/pom.xml | 2 +- server/pom.xml | 2 +- services/pom.xml | 2 +- 15 files changed, 16 insertions(+), 16 deletions(-) diff --git a/cassandra-storage/pom.xml b/cassandra-storage/pom.xml index 92f48380eb0..57afe2e7b32 100644 --- a/cassandra-storage/pom.xml +++ b/cassandra-storage/pom.xml @@ -28,7 +28,7 @@ io.druid druid - 0.6.84 + 0.6.85-SNAPSHOT diff --git a/common/pom.xml b/common/pom.xml index bed26256eba..9142ed9301c 100644 --- a/common/pom.xml +++ b/common/pom.xml @@ -28,7 +28,7 @@ io.druid druid - 0.6.84 + 0.6.85-SNAPSHOT diff --git a/examples/pom.xml b/examples/pom.xml index d47884cd55b..145796c3300 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -28,7 +28,7 @@ io.druid druid - 0.6.84 + 0.6.85-SNAPSHOT diff --git a/hdfs-storage/pom.xml b/hdfs-storage/pom.xml index a71dda5e115..be4433d6925 100644 --- a/hdfs-storage/pom.xml +++ 
b/hdfs-storage/pom.xml @@ -28,7 +28,7 @@ io.druid druid - 0.6.84 + 0.6.85-SNAPSHOT diff --git a/hll/pom.xml b/hll/pom.xml index b59cd001a22..a3d6cdd0324 100644 --- a/hll/pom.xml +++ b/hll/pom.xml @@ -28,7 +28,7 @@ io.druid druid - 0.6.84 + 0.6.85-SNAPSHOT diff --git a/indexing-hadoop/pom.xml b/indexing-hadoop/pom.xml index 52c4809adba..56f0e4a1fa7 100644 --- a/indexing-hadoop/pom.xml +++ b/indexing-hadoop/pom.xml @@ -28,7 +28,7 @@ io.druid druid - 0.6.84 + 0.6.85-SNAPSHOT diff --git a/indexing-service/pom.xml b/indexing-service/pom.xml index 55f170cb97a..8aefe2c44b5 100644 --- a/indexing-service/pom.xml +++ b/indexing-service/pom.xml @@ -28,7 +28,7 @@ io.druid druid - 0.6.84 + 0.6.85-SNAPSHOT diff --git a/kafka-eight/pom.xml b/kafka-eight/pom.xml index 69be3ddbf28..3e212fab75d 100644 --- a/kafka-eight/pom.xml +++ b/kafka-eight/pom.xml @@ -28,7 +28,7 @@ io.druid druid - 0.6.84 + 0.6.85-SNAPSHOT diff --git a/kafka-seven/pom.xml b/kafka-seven/pom.xml index 10d67f55eef..1ed8a898196 100644 --- a/kafka-seven/pom.xml +++ b/kafka-seven/pom.xml @@ -28,7 +28,7 @@ io.druid druid - 0.6.84 + 0.6.85-SNAPSHOT diff --git a/pom.xml b/pom.xml index e15c16d14c0..7598d927b3c 100644 --- a/pom.xml +++ b/pom.xml @@ -23,14 +23,14 @@ io.druid druid pom - 0.6.84 + 0.6.85-SNAPSHOT druid druid scm:git:ssh://git@github.com/metamx/druid.git scm:git:ssh://git@github.com/metamx/druid.git http://www.github.com/metamx/druid - druid-0.6.84 + druid-0.6.84-SNAPSHOT diff --git a/processing/pom.xml b/processing/pom.xml index fab5dea6278..780593940cc 100644 --- a/processing/pom.xml +++ b/processing/pom.xml @@ -28,7 +28,7 @@ io.druid druid - 0.6.84 + 0.6.85-SNAPSHOT diff --git a/rabbitmq/pom.xml b/rabbitmq/pom.xml index 4f89c484aec..fb01277a7b4 100644 --- a/rabbitmq/pom.xml +++ b/rabbitmq/pom.xml @@ -9,7 +9,7 @@ io.druid druid - 0.6.84 + 0.6.85-SNAPSHOT diff --git a/s3-extensions/pom.xml b/s3-extensions/pom.xml index d8976b9eabf..fd0a703b7b0 100644 --- a/s3-extensions/pom.xml +++ b/s3-extensions/pom.xml @@ -28,7 +28,7 @@ io.druid druid - 0.6.84 + 0.6.85-SNAPSHOT diff --git a/server/pom.xml b/server/pom.xml index b5531229d7d..877d1153efc 100644 --- a/server/pom.xml +++ b/server/pom.xml @@ -28,7 +28,7 @@ io.druid druid - 0.6.84 + 0.6.85-SNAPSHOT diff --git a/services/pom.xml b/services/pom.xml index 6fe10ac358b..8c86d9962ba 100644 --- a/services/pom.xml +++ b/services/pom.xml @@ -27,7 +27,7 @@ io.druid druid - 0.6.84 + 0.6.85-SNAPSHOT
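
A note on the interplay between patches 18 and 19: the serde tests added in patch 18 show that a query context round-tripped through JSON may hold its values as strings ("1", "true") rather than native types. A typed read like `query.getContextValue("priority", 0)` returns whatever object the context map holds through an unchecked cast, so a string-valued context fails at the caller's implicit cast to `int`, while the deprecated `getContextPriority(0)` is apparently tolerant of both representations, which would explain why patch 19 switches `ChainedExecutionQueryRunner` to it. The sketch below reproduces the distinction outside of Druid; the `QueryContextSketch` class and both lookup helpers are hypothetical stand-ins written for illustration, not the actual `Query` API, whose `getContextPriority` implementation is not shown in this series.

```java
import java.util.HashMap;
import java.util.Map;

// Hypothetical stand-in for a query's context map; not the Druid Query interface.
public class QueryContextSketch
{
  // Lenient lookup in the spirit of the deprecated getContextPriority():
  // accepts either a Number or its String form and coerces it to an int.
  static int getContextPriority(Map<String, Object> context, int defaultValue)
  {
    final Object value = context.get("priority");
    if (value == null) {
      return defaultValue;
    }
    if (value instanceof String) {
      return Integer.parseInt((String) value);
    }
    return ((Number) value).intValue();
  }

  // Typed lookup in the spirit of getContextValue(key, default): the cast is
  // unchecked, so a type mismatch only surfaces at the call site.
  @SuppressWarnings("unchecked")
  static <ContextType> ContextType getContextValue(Map<String, Object> context, String key, ContextType defaultValue)
  {
    final Object value = context.get(key);
    return value == null ? defaultValue : (ContextType) value;
  }

  public static void main(String[] args)
  {
    final Map<String, Object> context = new HashMap<>();
    context.put("priority", "1"); // a JSON round-trip can leave this as a String

    // The lenient path coerces the String and prints 1.
    System.out.println(getContextPriority(context, 0));

    // The typed path compiles, but the implicit cast to Integer fails at runtime.
    try {
      final int priority = QueryContextSketch.<Integer>getContextValue(context, "priority", 0);
      System.out.println(priority);
    } catch (ClassCastException e) {
      System.out.println("priority was held as a String, not an Integer");
    }
  }
}
```

Run as written, the lenient path prints 1 and the typed path lands in the `catch` block, which is the failure mode the string-valued variant of the context tests (`testContextSerde2`) guards against.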