diff --git a/build.sh b/build.sh
index 25b027b5626..e97f8f2b96c 100755
--- a/build.sh
+++ b/build.sh
@@ -30,4 +30,4 @@ echo "For examples, see: "
echo " "
ls -1 examples/*/*sh
echo " "
-echo "See also http://druid.io/docs/0.6.81"
+echo "See also http://druid.io/docs/0.6.84"
diff --git a/cassandra-storage/pom.xml b/cassandra-storage/pom.xml
index e3fc69954a9..57afe2e7b32 100644
--- a/cassandra-storage/pom.xml
+++ b/cassandra-storage/pom.xml
@@ -28,7 +28,7 @@
<groupId>io.druid</groupId>
<artifactId>druid</artifactId>
- <version>0.6.83-SNAPSHOT</version>
+ <version>0.6.85-SNAPSHOT</version>
diff --git a/common/pom.xml b/common/pom.xml
index c72acf6922a..9142ed9301c 100644
--- a/common/pom.xml
+++ b/common/pom.xml
@@ -28,7 +28,7 @@
<groupId>io.druid</groupId>
<artifactId>druid</artifactId>
- <version>0.6.83-SNAPSHOT</version>
+ <version>0.6.85-SNAPSHOT</version>
diff --git a/docs/content/Examples.md b/docs/content/Examples.md
index beb436c78a9..ad7ecace99e 100644
--- a/docs/content/Examples.md
+++ b/docs/content/Examples.md
@@ -19,13 +19,13 @@ Clone Druid and build it:
git clone https://github.com/metamx/druid.git druid
cd druid
git fetch --tags
-git checkout druid-0.6.81
+git checkout druid-0.6.84
./build.sh
```
### Downloading the DSK (Druid Standalone Kit)
-[Download](http://static.druid.io/artifacts/releases/druid-services-0.6.81-bin.tar.gz) a stand-alone tarball and run it:
+[Download](http://static.druid.io/artifacts/releases/druid-services-0.6.84-bin.tar.gz) a stand-alone tarball and run it:
``` bash
tar -xzf druid-services-0.X.X-bin.tar.gz
diff --git a/docs/content/Indexing-Service-Config.md b/docs/content/Indexing-Service-Config.md
index 19646a5ac47..b1a10b0c444 100644
--- a/docs/content/Indexing-Service-Config.md
+++ b/docs/content/Indexing-Service-Config.md
@@ -66,7 +66,7 @@ druid.host=#{IP_ADDR}:8080
druid.port=8080
druid.service=druid/prod/indexer
-druid.extensions.coordinates=["io.druid.extensions:druid-s3-extensions:0.6.81"]
+druid.extensions.coordinates=["io.druid.extensions:druid-s3-extensions:0.6.84"]
druid.zk.service.host=#{ZK_IPs}
druid.zk.paths.base=/druid/prod
@@ -115,7 +115,7 @@ druid.host=#{IP_ADDR}:8080
druid.port=8080
druid.service=druid/prod/worker
-druid.extensions.coordinates=["io.druid.extensions:druid-s3-extensions:0.6.81","io.druid.extensions:druid-kafka-seven:0.6.81"]
+druid.extensions.coordinates=["io.druid.extensions:druid-s3-extensions:0.6.84","io.druid.extensions:druid-kafka-seven:0.6.84"]
druid.zk.service.host=#{ZK_IPs}
druid.zk.paths.base=/druid/prod
diff --git a/docs/content/Realtime-Config.md b/docs/content/Realtime-Config.md
index 410fe8e4bce..5854aac4e06 100644
--- a/docs/content/Realtime-Config.md
+++ b/docs/content/Realtime-Config.md
@@ -27,7 +27,7 @@ druid.host=localhost
druid.service=realtime
druid.port=8083
-druid.extensions.coordinates=["io.druid.extensions:druid-kafka-seven:0.6.81"]
+druid.extensions.coordinates=["io.druid.extensions:druid-kafka-seven:0.6.84"]
druid.zk.service.host=localhost
@@ -76,7 +76,7 @@ druid.host=#{IP_ADDR}:8080
druid.port=8080
druid.service=druid/prod/realtime
-druid.extensions.coordinates=["io.druid.extensions:druid-s3-extensions:0.6.81","io.druid.extensions:druid-kafka-seven:0.6.81"]
+druid.extensions.coordinates=["io.druid.extensions:druid-s3-extensions:0.6.84","io.druid.extensions:druid-kafka-seven:0.6.84"]
druid.zk.service.host=#{ZK_IPs}
druid.zk.paths.base=/druid/prod
diff --git a/docs/content/Tasks.md b/docs/content/Tasks.md
index 799b934bca5..061f02d872a 100644
--- a/docs/content/Tasks.md
+++ b/docs/content/Tasks.md
@@ -51,12 +51,12 @@ The Index Task is a simpler variation of the Index Hadoop task that is designed
|--------|-----------|---------|
|type|The task type, this should always be "index".|yes|
|id|The task ID. If this is not explicitly specified, Druid generates the task ID using the name of the task file and date-time stamp. |no|
-|granularitySpec|Specifies the segment chunks that the task will process. `type` is always "uniform"; `gran` sets the granularity of the chunks ("DAY" means all segments containing timestamps in the same day, while `intervals` sets the interval that the chunks will cover.|yes|
+|granularitySpec|Specifies the segment chunks that the task will process. `type` is always "uniform"; `gran` sets the granularity of the chunks ("DAY" means all segments containing timestamps in the same day), while `intervals` sets the interval that the chunks will cover.|yes|
|spatialDimensions|Dimensions to build spatial indexes over. See [Geographic Queries](GeographicQueries.html).|no|
-|aggregators|The metrics to aggregate in the data set. For more info, see [Aggregations](Aggregations.html)|yes|
+|aggregators|The metrics to aggregate in the data set. For more info, see [Aggregations](Aggregations.html).|yes|
|indexGranularity|The rollup granularity for timestamps. See [Realtime Ingestion](Realtime-ingestion.html) for more information. |no|
|targetPartitionSize|Used in sharding. Determines how many rows are in each segment.|no|
-|firehose|The input source of data. For more info, see [Firehose](Firehose.html)|yes|
+|firehose|The input source of data. For more info, see [Firehose](Firehose.html).|yes|
|rowFlushBoundary|Used in determining when intermediate persist should occur to disk.|no|
### Index Hadoop Task
diff --git a/docs/content/Tutorial:-A-First-Look-at-Druid.md b/docs/content/Tutorial:-A-First-Look-at-Druid.md
index 468e78c310c..9f0800fe9d8 100644
--- a/docs/content/Tutorial:-A-First-Look-at-Druid.md
+++ b/docs/content/Tutorial:-A-First-Look-at-Druid.md
@@ -49,7 +49,7 @@ There are two ways to setup Druid: download a tarball, or [Build From Source](Bu
### Download a Tarball
-We've built a tarball that contains everything you'll need. You'll find it [here](http://static.druid.io/artifacts/releases/druid-services-0.6.81-bin.tar.gz). Download this file to a directory of your choosing.
+We've built a tarball that contains everything you'll need. You'll find it [here](http://static.druid.io/artifacts/releases/druid-services-0.6.84-bin.tar.gz). Download this file to a directory of your choosing.
You can extract the awesomeness within by issuing:
@@ -60,7 +60,7 @@ tar -zxvf druid-services-*-bin.tar.gz
Not too lost so far right? That's great! If you cd into the directory:
```
-cd druid-services-0.6.81
+cd druid-services-0.6.84
```
You should see a bunch of files:
diff --git a/docs/content/Tutorial:-Loading-Your-Data-Part-1.md b/docs/content/Tutorial:-Loading-Your-Data-Part-1.md
index 122ce70ccc4..cc7106b53e3 100644
--- a/docs/content/Tutorial:-Loading-Your-Data-Part-1.md
+++ b/docs/content/Tutorial:-Loading-Your-Data-Part-1.md
@@ -136,7 +136,7 @@ Indexing the Data
To index the data and build a Druid segment, we are going to need to submit a task to the indexing service. This task should already exist:
```
-examples/indexing/index_task.json
+examples/indexing/wikipedia_index_task.json
```
Open up the file to see the following:
diff --git a/docs/content/Tutorial:-The-Druid-Cluster.md b/docs/content/Tutorial:-The-Druid-Cluster.md
index 215c2d83207..b72a3050e40 100644
--- a/docs/content/Tutorial:-The-Druid-Cluster.md
+++ b/docs/content/Tutorial:-The-Druid-Cluster.md
@@ -13,7 +13,7 @@ In this tutorial, we will set up other types of Druid nodes and external depende
If you followed the first tutorial, you should already have Druid downloaded. If not, let's go back and do that first.
-You can download the latest version of druid [here](http://static.druid.io/artifacts/releases/druid-services-0.6.81-bin.tar.gz)
+You can download the latest version of druid [here](http://static.druid.io/artifacts/releases/druid-services-0.6.84-bin.tar.gz)
and untar the contents within by issuing:
@@ -149,7 +149,7 @@ druid.port=8081
druid.zk.service.host=localhost
-druid.extensions.coordinates=["io.druid.extensions:druid-s3-extensions:0.6.81"]
+druid.extensions.coordinates=["io.druid.extensions:druid-s3-extensions:0.6.84"]
# Dummy read only AWS account (used to download example data)
druid.s3.secretKey=QyyfVZ7llSiRg6Qcrql1eEUG7buFpAK6T6engr1b
@@ -240,7 +240,7 @@ druid.port=8083
druid.zk.service.host=localhost
-druid.extensions.coordinates=["io.druid.extensions:druid-examples:0.6.81","io.druid.extensions:druid-kafka-seven:0.6.81"]
+druid.extensions.coordinates=["io.druid.extensions:druid-examples:0.6.84","io.druid.extensions:druid-kafka-seven:0.6.84"]
# Change this config to db to hand off to the rest of the Druid cluster
druid.publish.type=noop
diff --git a/docs/content/Tutorial:-Webstream.md b/docs/content/Tutorial:-Webstream.md
index cd2bfb5eb9a..54b40410d61 100644
--- a/docs/content/Tutorial:-Webstream.md
+++ b/docs/content/Tutorial:-Webstream.md
@@ -37,7 +37,7 @@ There are two ways to setup Druid: download a tarball, or [Build From Source](Bu
h3. Download a Tarball
-We've built a tarball that contains everything you'll need. You'll find it [here](http://static.druid.io/artifacts/releases/druid-services-0.6.81-bin.tar.gz)
+We've built a tarball that contains everything you'll need. You'll find it [here](http://static.druid.io/artifacts/releases/druid-services-0.6.84-bin.tar.gz)
Download this file to a directory of your choosing.
You can extract the awesomeness within by issuing:
@@ -48,7 +48,7 @@ tar zxvf druid-services-*-bin.tar.gz
Not too lost so far right? That's great! If you cd into the directory:
```
-cd druid-services-0.6.81
+cd druid-services-0.6.84
```
You should see a bunch of files:
diff --git a/docs/content/Twitter-Tutorial.textile b/docs/content/Twitter-Tutorial.textile
index 6decc746b17..64580a74cf7 100644
--- a/docs/content/Twitter-Tutorial.textile
+++ b/docs/content/Twitter-Tutorial.textile
@@ -9,7 +9,7 @@ There are two ways to setup Druid: download a tarball, or build it from source.
h3. Download a Tarball
-We've built a tarball that contains everything you'll need. You'll find it "here":http://static.druid.io/artifacts/releases/druid-services-0.6.81-bin.tar.gz.
+We've built a tarball that contains everything you'll need. You'll find it "here":http://static.druid.io/artifacts/releases/druid-services-0.6.84-bin.tar.gz.
Download this bad boy to a directory of your choosing.
You can extract the awesomeness within by issuing:
diff --git a/docs/content/toc.textile b/docs/content/toc.textile
index 136aa730335..c17a7539c06 100644
--- a/docs/content/toc.textile
+++ b/docs/content/toc.textile
@@ -22,12 +22,6 @@ h2. Configuration
* "Broker":Broker-Config.html
* "Indexing Service":Indexing-Service-Config.html
-h2. Operations
-* "Extending Druid":./Modules.html
-* "Cluster Setup":./Cluster-setup.html
-* "Booting a Production Cluster":./Booting-a-production-cluster.html
-* "Performance FAQ":./Performance-FAQ.html
-
h2. Data Ingestion
* "Realtime":./Realtime-ingestion.html
* "Batch":./Batch-ingestion.html
@@ -36,6 +30,12 @@ h2. Data Ingestion
* "Data Formats":./Data_formats.html
* "Ingestion FAQ":./Ingestion-FAQ.html
+h2. Operations
+* "Extending Druid":./Modules.html
+* "Cluster Setup":./Cluster-setup.html
+* "Booting a Production Cluster":./Booting-a-production-cluster.html
+* "Performance FAQ":./Performance-FAQ.html
+
h2. Querying
* "Querying":./Querying.html
** "Filters":./Filters.html
diff --git a/examples/config/historical/runtime.properties b/examples/config/historical/runtime.properties
index 1dffb2cf8ff..ee085149e60 100644
--- a/examples/config/historical/runtime.properties
+++ b/examples/config/historical/runtime.properties
@@ -4,7 +4,7 @@ druid.port=8081
druid.zk.service.host=localhost
-druid.extensions.coordinates=["io.druid.extensions:druid-s3-extensions:0.6.81"]
+druid.extensions.coordinates=["io.druid.extensions:druid-s3-extensions:0.6.84"]
# Dummy read only AWS account (used to download example data)
druid.s3.secretKey=QyyfVZ7llSiRg6Qcrql1eEUG7buFpAK6T6engr1b
diff --git a/examples/config/realtime/runtime.properties b/examples/config/realtime/runtime.properties
index 94ec5bafd19..571e184f8a1 100644
--- a/examples/config/realtime/runtime.properties
+++ b/examples/config/realtime/runtime.properties
@@ -4,7 +4,7 @@ druid.port=8083
druid.zk.service.host=localhost
-druid.extensions.coordinates=["io.druid.extensions:druid-examples:0.6.81","io.druid.extensions:druid-kafka-seven:0.6.81","io.druid.extensions:druid-rabbitmq:0.6.81"]
+druid.extensions.coordinates=["io.druid.extensions:druid-examples:0.6.84","io.druid.extensions:druid-kafka-seven:0.6.84","io.druid.extensions:druid-rabbitmq:0.6.84"]
# Change this config to db to hand off to the rest of the Druid cluster
druid.publish.type=noop
diff --git a/examples/pom.xml b/examples/pom.xml
index 26b2d2d8b46..145796c3300 100644
--- a/examples/pom.xml
+++ b/examples/pom.xml
@@ -28,7 +28,7 @@
<groupId>io.druid</groupId>
<artifactId>druid</artifactId>
- <version>0.6.83-SNAPSHOT</version>
+ <version>0.6.85-SNAPSHOT</version>
diff --git a/hdfs-storage/pom.xml b/hdfs-storage/pom.xml
index 6e9919a3b1f..be4433d6925 100644
--- a/hdfs-storage/pom.xml
+++ b/hdfs-storage/pom.xml
@@ -28,7 +28,7 @@
<groupId>io.druid</groupId>
<artifactId>druid</artifactId>
- <version>0.6.83-SNAPSHOT</version>
+ <version>0.6.85-SNAPSHOT</version>
diff --git a/hll/pom.xml b/hll/pom.xml
index 2e0c36b5ca6..a3d6cdd0324 100644
--- a/hll/pom.xml
+++ b/hll/pom.xml
@@ -28,7 +28,7 @@
<groupId>io.druid</groupId>
<artifactId>druid</artifactId>
- <version>0.6.83-SNAPSHOT</version>
+ <version>0.6.85-SNAPSHOT</version>
diff --git a/indexing-hadoop/pom.xml b/indexing-hadoop/pom.xml
index 19af0cb921c..56f0e4a1fa7 100644
--- a/indexing-hadoop/pom.xml
+++ b/indexing-hadoop/pom.xml
@@ -28,7 +28,7 @@
<groupId>io.druid</groupId>
<artifactId>druid</artifactId>
- <version>0.6.83-SNAPSHOT</version>
+ <version>0.6.85-SNAPSHOT</version>
diff --git a/indexing-service/pom.xml b/indexing-service/pom.xml
index 83c456fcbfa..8aefe2c44b5 100644
--- a/indexing-service/pom.xml
+++ b/indexing-service/pom.xml
@@ -28,7 +28,7 @@
<groupId>io.druid</groupId>
<artifactId>druid</artifactId>
- <version>0.6.83-SNAPSHOT</version>
+ <version>0.6.85-SNAPSHOT</version>
diff --git a/kafka-eight/pom.xml b/kafka-eight/pom.xml
index 1c1df24d020..3e212fab75d 100644
--- a/kafka-eight/pom.xml
+++ b/kafka-eight/pom.xml
@@ -28,7 +28,7 @@
<groupId>io.druid</groupId>
<artifactId>druid</artifactId>
- <version>0.6.83-SNAPSHOT</version>
+ <version>0.6.85-SNAPSHOT</version>
diff --git a/kafka-seven/pom.xml b/kafka-seven/pom.xml
index 96594460983..1ed8a898196 100644
--- a/kafka-seven/pom.xml
+++ b/kafka-seven/pom.xml
@@ -28,7 +28,7 @@
<groupId>io.druid</groupId>
<artifactId>druid</artifactId>
- <version>0.6.83-SNAPSHOT</version>
+ <version>0.6.85-SNAPSHOT</version>
diff --git a/pom.xml b/pom.xml
index 2ae2afb18ea..7598d927b3c 100644
--- a/pom.xml
+++ b/pom.xml
@@ -23,14 +23,14 @@
<groupId>io.druid</groupId>
<artifactId>druid</artifactId>
<packaging>pom</packaging>
- <version>0.6.83-SNAPSHOT</version>
+ <version>0.6.85-SNAPSHOT</version>
<name>druid</name>
<description>druid</description>
<scm>
<connection>scm:git:ssh://git@github.com/metamx/druid.git</connection>
<developerConnection>scm:git:ssh://git@github.com/metamx/druid.git</developerConnection>
<url>http://www.github.com/metamx/druid</url>
- <tag>druid-0.6.81-SNAPSHOT</tag>
+ <tag>druid-0.6.84-SNAPSHOT</tag>
diff --git a/processing/pom.xml b/processing/pom.xml
index 4169ea3be38..780593940cc 100644
--- a/processing/pom.xml
+++ b/processing/pom.xml
@@ -28,7 +28,7 @@
<groupId>io.druid</groupId>
<artifactId>druid</artifactId>
- <version>0.6.83-SNAPSHOT</version>
+ <version>0.6.85-SNAPSHOT</version>
diff --git a/processing/src/main/java/io/druid/query/BaseQuery.java b/processing/src/main/java/io/druid/query/BaseQuery.java
index 71beaa26652..32d9c3256f4 100644
--- a/processing/src/main/java/io/druid/query/BaseQuery.java
+++ b/processing/src/main/java/io/druid/query/BaseQuery.java
@@ -23,6 +23,7 @@ import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
+import com.metamx.common.ISE;
import com.metamx.common.guava.Sequence;
import io.druid.query.spec.QuerySegmentSpec;
import org.joda.time.Duration;
@@ -120,6 +121,67 @@ public abstract class BaseQuery<T> implements Query<T>
return retVal == null ? defaultValue : retVal;
}
+ @Override
+ public int getContextPriority(int defaultValue)
+ {
+ if (context == null) {
+ return defaultValue;
+ }
+ Object val = context.get("priority");
+ if (val == null) {
+ return defaultValue;
+ }
+ if (val instanceof String) {
+ return Integer.parseInt((String) val);
+ } else if (val instanceof Integer) {
+ return (int) val;
+ } else {
+ throw new ISE("Unknown type [%s]", val.getClass());
+ }
+ }
+
+ @Override
+ public boolean getContextBySegment(boolean defaultValue)
+ {
+ return parseBoolean("bySegment", defaultValue);
+ }
+
+ @Override
+ public boolean getContextPopulateCache(boolean defaultValue)
+ {
+ return parseBoolean("populateCache", defaultValue);
+ }
+
+ @Override
+ public boolean getContextUseCache(boolean defaultValue)
+ {
+ return parseBoolean("useCache", defaultValue);
+ }
+
+ @Override
+ public boolean getContextFinalize(boolean defaultValue)
+ {
+ return parseBoolean("finalize", defaultValue);
+ }
+
+ private boolean parseBoolean(String key, boolean defaultValue)
+ {
+ if (context == null) {
+ return defaultValue;
+ }
+ Object val = context.get(key);
+ if (val == null) {
+ return defaultValue;
+ }
+ if (val instanceof String) {
+ return Boolean.parseBoolean((String) val);
+ } else if (val instanceof Boolean) {
+ return (boolean) val;
+ } else {
+ throw new ISE("Unknown type [%s]. Cannot parse!", val.getClass());
+ }
+ }
+
protected Map<String, Object> computeOverridenContext(Map<String, Object> overrides)
{
Map<String, Object> overridden = Maps.newTreeMap();
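
The new context getters accept both the legacy string-encoded values and natively typed ones, which is what keeps this change backwards compatible with older clients. A minimal standalone sketch of the parsing behavior (the class and helper here are illustrative, not part of the patch):

```java
import com.google.common.collect.ImmutableMap;

import java.util.Map;

public class ContextParsingSketch
{
  // Mirrors the BaseQuery.getContextPriority logic: accept a String or an
  // Integer, fall back to the default when the key is absent, reject other types.
  static int parsePriority(Map<String, Object> context, int defaultValue)
  {
    final Object val = context.get("priority");
    if (val == null) {
      return defaultValue;
    }
    if (val instanceof String) {
      return Integer.parseInt((String) val);
    } else if (val instanceof Integer) {
      return (Integer) val;
    } else {
      throw new IllegalStateException("Unknown type " + val.getClass());
    }
  }

  public static void main(String[] args)
  {
    // Legacy clients send string-encoded values; new clients send typed ones.
    System.out.println(parsePriority(ImmutableMap.<String, Object>of("priority", "1"), 0)); // 1
    System.out.println(parsePriority(ImmutableMap.<String, Object>of("priority", 1), 0));   // 1
    System.out.println(parsePriority(ImmutableMap.<String, Object>of(), 0));                // 0
  }
}
```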
diff --git a/processing/src/main/java/io/druid/query/BySegmentQueryRunner.java b/processing/src/main/java/io/druid/query/BySegmentQueryRunner.java
index d6150f63456..44094d0216a 100644
--- a/processing/src/main/java/io/druid/query/BySegmentQueryRunner.java
+++ b/processing/src/main/java/io/druid/query/BySegmentQueryRunner.java
@@ -53,7 +53,7 @@ public class BySegmentQueryRunner<T> implements QueryRunner<T>
@SuppressWarnings("unchecked")
public Sequence<T> run(final Query<T> query)
{
- if (Boolean.parseBoolean(query.getContextValue("bySegment"))) {
+ if (query.getContextBySegment(false)) {
final Sequence<T> baseSequence = base.run(query);
return new Sequence<T>()
{
diff --git a/processing/src/main/java/io/druid/query/BySegmentSkippingQueryRunner.java b/processing/src/main/java/io/druid/query/BySegmentSkippingQueryRunner.java
index 8e666c30b16..13ca4dd75df 100644
--- a/processing/src/main/java/io/druid/query/BySegmentSkippingQueryRunner.java
+++ b/processing/src/main/java/io/druid/query/BySegmentSkippingQueryRunner.java
@@ -37,7 +37,7 @@ public abstract class BySegmentSkippingQueryRunner<T> implements QueryRunner<T>
@Override
public Sequence<T> run(Query<T> query)
{
- if (Boolean.parseBoolean(query.getContextValue("bySegment"))) {
+ if (query.getContextBySegment(false)) {
return baseRunner.run(query);
}
diff --git a/processing/src/main/java/io/druid/query/ChainedExecutionQueryRunner.java b/processing/src/main/java/io/druid/query/ChainedExecutionQueryRunner.java
index d3600068a23..776276415a2 100644
--- a/processing/src/main/java/io/druid/query/ChainedExecutionQueryRunner.java
+++ b/processing/src/main/java/io/druid/query/ChainedExecutionQueryRunner.java
@@ -83,7 +83,7 @@ public class ChainedExecutionQueryRunner<T> implements QueryRunner<T>
@Override
public Sequence<T> run(final Query<T> query)
{
- final int priority = Integer.parseInt((String) query.getContextValue("priority", "0"));
+ final int priority = query.getContextPriority(0);
return new BaseSequence<T, Iterator<T>>(
new BaseSequence.IteratorMaker<T, Iterator<T>>()
diff --git a/processing/src/main/java/io/druid/query/FinalizeResultsQueryRunner.java b/processing/src/main/java/io/druid/query/FinalizeResultsQueryRunner.java
index 2880332e184..dee05888470 100644
--- a/processing/src/main/java/io/druid/query/FinalizeResultsQueryRunner.java
+++ b/processing/src/main/java/io/druid/query/FinalizeResultsQueryRunner.java
@@ -48,8 +48,8 @@ public class FinalizeResultsQueryRunner<T> implements QueryRunner<T>
@Override
public Sequence<T> run(final Query<T> query)
{
- final boolean isBySegment = Boolean.parseBoolean(query.getContextValue("bySegment"));
- final boolean shouldFinalize = Boolean.parseBoolean(query.getContextValue("finalize", "true"));
+ final boolean isBySegment = query.getContextBySegment(false);
+ final boolean shouldFinalize = query.getContextFinalize(true);
if (shouldFinalize) {
Function<T, T> finalizerFn;
if (isBySegment) {
@@ -84,8 +84,7 @@ public class FinalizeResultsQueryRunner<T> implements QueryRunner<T>
);
}
};
- }
- else {
+ } else {
finalizerFn = toolChest.makeMetricManipulatorFn(
query,
new MetricManipulationFn()
@@ -100,7 +99,7 @@ public class FinalizeResultsQueryRunner implements QueryRunner
}
return Sequences.map(
- baseRunner.run(query.withOverriddenContext(ImmutableMap.of("finalize", "false"))),
+ baseRunner.run(query.withOverriddenContext(ImmutableMap.<String, Object>of("finalize", false))),
finalizerFn
);
}
diff --git a/processing/src/main/java/io/druid/query/GroupByParallelQueryRunner.java b/processing/src/main/java/io/druid/query/GroupByParallelQueryRunner.java
index 10dde9b26ea..20817a772e5 100644
--- a/processing/src/main/java/io/druid/query/GroupByParallelQueryRunner.java
+++ b/processing/src/main/java/io/druid/query/GroupByParallelQueryRunner.java
@@ -83,7 +83,7 @@ public class GroupByParallelQueryRunner implements QueryRunner<Row>
query,
configSupplier.get()
);
- final int priority = Integer.parseInt((String) query.getContextValue("priority", "0"));
+ final int priority = query.getContextPriority(0);
if (Iterables.isEmpty(queryables)) {
log.warn("No queryables found.");
diff --git a/processing/src/main/java/io/druid/query/Query.java b/processing/src/main/java/io/druid/query/Query.java
index 10a84328584..9b9c9e373f9 100644
--- a/processing/src/main/java/io/druid/query/Query.java
+++ b/processing/src/main/java/io/druid/query/Query.java
@@ -74,6 +74,13 @@ public interface Query<T>
public <ContextType> ContextType getContextValue(String key, ContextType defaultValue);
+ // For backwards compatibility
+ @Deprecated public int getContextPriority(int defaultValue);
+ @Deprecated public boolean getContextBySegment(boolean defaultValue);
+ @Deprecated public boolean getContextPopulateCache(boolean defaultValue);
+ @Deprecated public boolean getContextUseCache(boolean defaultValue);
+ @Deprecated public boolean getContextFinalize(boolean defaultValue);
+
public Query<T> withOverriddenContext(Map<String, Object> contextOverride);
public Query<T> withQuerySegmentSpec(QuerySegmentSpec spec);
diff --git a/processing/src/main/java/io/druid/query/groupby/GroupByQueryEngine.java b/processing/src/main/java/io/druid/query/groupby/GroupByQueryEngine.java
index ea58501635b..b8f63a5c4a5 100644
--- a/processing/src/main/java/io/druid/query/groupby/GroupByQueryEngine.java
+++ b/processing/src/main/java/io/druid/query/groupby/GroupByQueryEngine.java
@@ -182,7 +182,7 @@ public class GroupByQueryEngine
final DimensionSelector dimSelector = dims.get(0);
final IndexedInts row = dimSelector.getRow();
- if (row.size() == 0) {
+ if (row == null || row.size() == 0) {
ByteBuffer newKey = key.duplicate();
newKey.putInt(dimSelector.getValueCardinality());
unaggregatedBuffers = updateValues(newKey, dims.subList(1, dims.size()));
diff --git a/processing/src/main/java/io/druid/query/search/SearchQueryQueryToolChest.java b/processing/src/main/java/io/druid/query/search/SearchQueryQueryToolChest.java
index 6e14ef1c1f3..f559829d593 100644
--- a/processing/src/main/java/io/druid/query/search/SearchQueryQueryToolChest.java
+++ b/processing/src/main/java/io/druid/query/search/SearchQueryQueryToolChest.java
@@ -294,7 +294,7 @@ public class SearchQueryQueryToolChest extends QueryToolChest<Result<SearchResultValue>, SearchQuery>
diff --git a/processing/src/test/java/io/druid/query/timeboundary/TimeBoundaryQueryTest.java b/processing/src/test/java/io/druid/query/timeboundary/TimeBoundaryQueryTest.java
@@ ... @@ public class TimeBoundaryQueryTest
+ @Test
+ public void testContextSerde() throws Exception
+ {
+ final TimeBoundaryQuery query = Druids.newTimeBoundaryQueryBuilder()
+ .dataSource("foo")
+ .intervals("2013/2014")
+ .context(
+ ImmutableMap.<String, Object>of(
+ "priority",
+ 1,
+ "useCache",
+ true,
+ "populateCache",
+ true,
+ "finalize",
+ true
+ )
+ ).build();
+
+ final ObjectMapper mapper = new DefaultObjectMapper();
+
+ final TimeBoundaryQuery serdeQuery = mapper.readValue(
+ mapper.writeValueAsBytes(
+ mapper.readValue(
+ mapper.writeValueAsString(
+ query
+ ), TimeBoundaryQuery.class
+ )
+ ), TimeBoundaryQuery.class
+ );
+
+
+ Assert.assertEquals(1, serdeQuery.getContextValue("priority"));
+ Assert.assertEquals(true, serdeQuery.getContextValue("useCache"));
+ Assert.assertEquals(true, serdeQuery.getContextValue("populateCache"));
+ Assert.assertEquals(true, serdeQuery.getContextValue("finalize"));
+ }
+
+ @Test
+ public void testContextSerde2() throws Exception
+ {
+ final TimeBoundaryQuery query = Druids.newTimeBoundaryQueryBuilder()
+ .dataSource("foo")
+ .intervals("2013/2014")
+ .context(
+ ImmutableMap.<String, Object>of(
+ "priority",
+ "1",
+ "useCache",
+ "true",
+ "populateCache",
+ "true",
+ "finalize",
+ "true"
+ )
+ ).build();
+
+ final ObjectMapper mapper = new DefaultObjectMapper();
+
+ final TimeBoundaryQuery serdeQuery = mapper.readValue(
+ mapper.writeValueAsBytes(
+ mapper.readValue(
+ mapper.writeValueAsString(
+ query
+ ), TimeBoundaryQuery.class
+ )
+ ), TimeBoundaryQuery.class
+ );
+
+
+ Assert.assertEquals("1", serdeQuery.getContextValue("priority"));
+ Assert.assertEquals("true", serdeQuery.getContextValue("useCache"));
+ Assert.assertEquals("true", serdeQuery.getContextValue("populateCache"));
+ Assert.assertEquals("true", serdeQuery.getContextValue("finalize"));
+ }
}
diff --git a/rabbitmq/pom.xml b/rabbitmq/pom.xml
index 66d37cf381c..fb01277a7b4 100644
--- a/rabbitmq/pom.xml
+++ b/rabbitmq/pom.xml
@@ -9,7 +9,7 @@
<groupId>io.druid</groupId>
<artifactId>druid</artifactId>
- <version>0.6.83-SNAPSHOT</version>
+ <version>0.6.85-SNAPSHOT</version>
diff --git a/s3-extensions/pom.xml b/s3-extensions/pom.xml
index 06f414ecc85..fd0a703b7b0 100644
--- a/s3-extensions/pom.xml
+++ b/s3-extensions/pom.xml
@@ -28,7 +28,7 @@
<groupId>io.druid</groupId>
<artifactId>druid</artifactId>
- <version>0.6.83-SNAPSHOT</version>
+ <version>0.6.85-SNAPSHOT</version>
diff --git a/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentArchiver.java b/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentArchiver.java
index 69b1843c125..e8e98987bf1 100644
--- a/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentArchiver.java
+++ b/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentArchiver.java
@@ -39,7 +39,7 @@ public class S3DataSegmentArchiver extends S3DataSegmentMover implements DataSeg
S3DataSegmentPusherConfig restoreConfig
)
{
- super(s3Client);
+ super(s3Client, restoreConfig);
this.archiveConfig = archiveConfig;
this.restoreConfig = restoreConfig;
}
diff --git a/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentMover.java b/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentMover.java
index fbf18df4c18..d259ab185d2 100644
--- a/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentMover.java
+++ b/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentMover.java
@@ -30,6 +30,7 @@ import io.druid.segment.loading.DataSegmentMover;
import io.druid.segment.loading.SegmentLoadingException;
import io.druid.timeline.DataSegment;
import org.jets3t.service.ServiceException;
+import org.jets3t.service.acl.gs.GSAccessControlList;
import org.jets3t.service.impl.rest.httpclient.RestS3Service;
import org.jets3t.service.model.S3Object;
@@ -41,13 +42,16 @@ public class S3DataSegmentMover implements DataSegmentMover
private static final Logger log = new Logger(S3DataSegmentMover.class);
private final RestS3Service s3Client;
+ private final S3DataSegmentPusherConfig config;
@Inject
public S3DataSegmentMover(
- RestS3Service s3Client
+ RestS3Service s3Client,
+ S3DataSegmentPusherConfig config
)
{
this.s3Client = s3Client;
+ this.config = config;
}
@Override
@@ -124,7 +128,11 @@ public class S3DataSegmentMover implements DataSegmentMover
targetS3Bucket,
targetS3Path
);
- s3Client.moveObject(s3Bucket, s3Path, targetS3Bucket, new S3Object(targetS3Path), false);
+ final S3Object target = new S3Object(targetS3Path);
+ if (!config.getDisableAcl()) {
+ target.setAcl(GSAccessControlList.REST_CANNED_BUCKET_OWNER_FULL_CONTROL);
+ }
+ s3Client.moveObject(s3Bucket, s3Path, targetS3Bucket, target, false);
}
} else {
// ensure object exists in target location
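
Since the mover now consults the pusher config before applying an ACL, every construction site gains a second argument; the test changes below follow the same pattern. A small wiring sketch (the factory method is illustrative, not part of the patch):

```java
import io.druid.storage.s3.S3DataSegmentMover;
import io.druid.storage.s3.S3DataSegmentPusherConfig;
import org.jets3t.service.impl.rest.httpclient.RestS3Service;

public class MoverWiringSketch
{
  // Previously: new S3DataSegmentMover(s3Client). The default config leaves
  // disableAcl false, so moved objects still receive the canned ACL.
  public static S3DataSegmentMover makeMover(RestS3Service s3Client)
  {
    return new S3DataSegmentMover(s3Client, new S3DataSegmentPusherConfig());
  }
}
```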
diff --git a/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentPusher.java b/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentPusher.java
index 664c270799b..5dad247544b 100644
--- a/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentPusher.java
+++ b/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentPusher.java
@@ -89,7 +89,7 @@ public class S3DataSegmentPusher implements DataSegmentPusher
toPush.setBucketName(outputBucket);
toPush.setKey(s3Path);
if (!config.getDisableAcl()) {
- toPush.setAcl(AccessControlList.REST_CANNED_AUTHENTICATED_READ);
+ toPush.setAcl(GSAccessControlList.REST_CANNED_BUCKET_OWNER_FULL_CONTROL);
}
log.info("Pushing %s.", toPush);
diff --git a/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentMoverTest.java b/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentMoverTest.java
index 6206da881a4..c13d22de5f1 100644
--- a/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentMoverTest.java
+++ b/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentMoverTest.java
@@ -62,7 +62,7 @@ public class S3DataSegmentMoverTest
public void testMove() throws Exception
{
MockStorageService mockS3Client = new MockStorageService();
- S3DataSegmentMover mover = new S3DataSegmentMover(mockS3Client);
+ S3DataSegmentMover mover = new S3DataSegmentMover(mockS3Client, new S3DataSegmentPusherConfig());
mockS3Client.putObject("main", new S3Object("baseKey/test/2013-01-01T00:00:00.000Z_2013-01-02T00:00:00.000Z/1/0/index.zip"));
mockS3Client.putObject("main", new S3Object("baseKey/test/2013-01-01T00:00:00.000Z_2013-01-02T00:00:00.000Z/1/0/descriptor.json"));
@@ -82,7 +82,7 @@ public class S3DataSegmentMoverTest
public void testMoveNoop() throws Exception
{
MockStorageService mockS3Client = new MockStorageService();
- S3DataSegmentMover mover = new S3DataSegmentMover(mockS3Client);
+ S3DataSegmentMover mover = new S3DataSegmentMover(mockS3Client, new S3DataSegmentPusherConfig());
mockS3Client.putObject("archive", new S3Object("targetBaseKey/test/2013-01-01T00:00:00.000Z_2013-01-02T00:00:00.000Z/1/0/index.zip"));
mockS3Client.putObject("archive", new S3Object("targetBaseKey/test/2013-01-01T00:00:00.000Z_2013-01-02T00:00:00.000Z/1/0/descriptor.json"));
@@ -103,7 +103,7 @@ public class S3DataSegmentMoverTest
public void testMoveException() throws Exception
{
MockStorageService mockS3Client = new MockStorageService();
- S3DataSegmentMover mover = new S3DataSegmentMover(mockS3Client);
+ S3DataSegmentMover mover = new S3DataSegmentMover(mockS3Client, new S3DataSegmentPusherConfig());
mover.move(
sourceSegment,
diff --git a/server/pom.xml b/server/pom.xml
index 34c9244f0ef..877d1153efc 100644
--- a/server/pom.xml
+++ b/server/pom.xml
@@ -28,7 +28,7 @@
<groupId>io.druid</groupId>
<artifactId>druid</artifactId>
- <version>0.6.83-SNAPSHOT</version>
+ <version>0.6.85-SNAPSHOT</version>
diff --git a/server/src/main/java/io/druid/client/CachePopulatingQueryRunner.java b/server/src/main/java/io/druid/client/CachePopulatingQueryRunner.java
index a9997946542..056d213ef7b 100644
--- a/server/src/main/java/io/druid/client/CachePopulatingQueryRunner.java
+++ b/server/src/main/java/io/druid/client/CachePopulatingQueryRunner.java
@@ -70,7 +70,7 @@ public class CachePopulatingQueryRunner<T> implements QueryRunner<T>
final CacheStrategy<T, Object, Query<T>> strategy = toolChest.getCacheStrategy(query);
- final boolean populateCache = Boolean.parseBoolean(query.getContextValue(CacheConfig.POPULATE_CACHE, "true"))
+ final boolean populateCache = query.getContextPopulateCache(true)
&& strategy != null
&& cacheConfig.isPopulateCache()
// historical only populates distributed cache since the cache lookups are done at broker.
diff --git a/server/src/main/java/io/druid/client/CachingClusteredClient.java b/server/src/main/java/io/druid/client/CachingClusteredClient.java
index 65ac6bea415..0e63f9e4ac3 100644
--- a/server/src/main/java/io/druid/client/CachingClusteredClient.java
+++ b/server/src/main/java/io/druid/client/CachingClusteredClient.java
@@ -62,7 +62,6 @@ import io.druid.timeline.partition.PartitionChunk;
import org.joda.time.DateTime;
import org.joda.time.Interval;
-import javax.annotation.Nullable;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
@@ -125,24 +124,24 @@ public class CachingClusteredClient<T> implements QueryRunner<T>
final List<Pair<Interval, byte[]>> cachedResults = Lists.newArrayList();
final Map<String, CachePopulator> cachePopulatorMap = Maps.newHashMap();
- final boolean useCache = Boolean.parseBoolean(query.getContextValue(CacheConfig.USE_CACHE, "true"))
+ final boolean useCache = query.getContextUseCache(true)
&& strategy != null
&& cacheConfig.isUseCache();
- final boolean populateCache = Boolean.parseBoolean(query.getContextValue(CacheConfig.POPULATE_CACHE, "true"))
+ final boolean populateCache = query.getContextPopulateCache(true)
&& strategy != null && cacheConfig.isPopulateCache();
- final boolean isBySegment = Boolean.parseBoolean(query.getContextValue("bySegment", "false"));
+ final boolean isBySegment = query.getContextBySegment(false);
ImmutableMap.Builder<String, Object> contextBuilder = new ImmutableMap.Builder<>();
- final String priority = query.getContextValue("priority", "0");
+ final int priority = query.getContextPriority(0);
contextBuilder.put("priority", priority);
if (populateCache) {
- contextBuilder.put(CacheConfig.POPULATE_CACHE, "false");
- contextBuilder.put("bySegment", "true");
+ contextBuilder.put(CacheConfig.POPULATE_CACHE, false);
+ contextBuilder.put("bySegment", true);
}
- contextBuilder.put("intermediate", "true");
+ contextBuilder.put("intermediate", true);
final Query<T> rewrittenQuery = query.withOverriddenContext(contextBuilder.build());
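
Because the override map now carries typed values, the rewritten query serializes `priority` as a JSON number and the cache flags as JSON booleans; the TimeBoundaryQuery serde tests earlier in this patch assert exactly that round trip. A short sketch of the new shape (standalone, illustrative names):

```java
import com.google.common.collect.ImmutableMap;

import java.util.Map;

public class ContextRewriteSketch
{
  public static Map<String, Object> rewrittenContext(int priority)
  {
    final ImmutableMap.Builder<String, Object> contextBuilder = new ImmutableMap.Builder<>();
    contextBuilder.put("priority", priority); // previously the string "0"
    contextBuilder.put("bySegment", true);    // previously the string "true"
    contextBuilder.put("intermediate", true); // previously the string "true"
    return contextBuilder.build();
  }
}
```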
diff --git a/server/src/main/java/io/druid/client/DirectDruidClient.java b/server/src/main/java/io/druid/client/DirectDruidClient.java
index fa95ba97f11..76c842029b8 100644
--- a/server/src/main/java/io/druid/client/DirectDruidClient.java
+++ b/server/src/main/java/io/druid/client/DirectDruidClient.java
@@ -106,7 +106,7 @@ public class DirectDruidClient<T> implements QueryRunner<T>
public Sequence<T> run(Query<T> query)
{
QueryToolChest<T, Query<T>> toolChest = warehouse.getToolChest(query);
- boolean isBySegment = Boolean.parseBoolean(query.getContextValue("bySegment", "false"));
+ boolean isBySegment = query.getContextBySegment(false);
Pair<JavaType, JavaType> types = typesMap.get(query.getClass());
if (types == null) {
diff --git a/server/src/main/java/io/druid/client/RoutingDruidClient.java b/server/src/main/java/io/druid/client/RoutingDruidClient.java
index 9fd3e2b0eac..7ad4ec5d820 100644
--- a/server/src/main/java/io/druid/client/RoutingDruidClient.java
+++ b/server/src/main/java/io/druid/client/RoutingDruidClient.java
@@ -68,13 +68,12 @@ public class RoutingDruidClient<IntermediateType, FinalType>
}
public ListenableFuture<FinalType> run(
- String host,
+ String url,
Query query,
HttpResponseHandler<IntermediateType, FinalType> responseHandler
)
{
final ListenableFuture<FinalType> future;
- final String url = String.format("http://%s/druid/v2/", host);
try {
log.debug("Querying url[%s]", url);
diff --git a/server/src/main/java/io/druid/segment/realtime/plumber/RealtimePlumberSchool.java b/server/src/main/java/io/druid/segment/realtime/plumber/RealtimePlumberSchool.java
index 4a8332137d4..447caa4a7e6 100644
--- a/server/src/main/java/io/druid/segment/realtime/plumber/RealtimePlumberSchool.java
+++ b/server/src/main/java/io/druid/segment/realtime/plumber/RealtimePlumberSchool.java
@@ -117,6 +117,12 @@ public class RealtimePlumberSchool implements PlumberSchool
this.rejectionPolicyFactory = factory;
}
+ @JsonProperty("maxPendingPersists")
+ public void setDefaultMaxPendingPersists(int maxPendingPersists)
+ {
+ this.maxPendingPersists = maxPendingPersists;
+ }
+
public void setEmitter(ServiceEmitter emitter)
{
this.emitter = emitter;
@@ -152,11 +158,6 @@ public class RealtimePlumberSchool implements PlumberSchool
this.queryExecutorService = executorService;
}
- public void setDefaultMaxPendingPersists(int maxPendingPersists)
- {
- this.maxPendingPersists = maxPendingPersists;
- }
-
@Override
public Plumber findPlumber(final Schema schema, final FireDepartmentMetrics metrics)
{
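
Moving the setter under `@JsonProperty("maxPendingPersists")` exposes the knob to JSON configuration. Presumably it can then be set directly in the realtime spec's plumber block, along the lines of the sketch below (the field placement is inferred from the annotation, not taken from this patch):

```json
"plumber": {
  "type": "realtime",
  "maxPendingPersists": 2
}
```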
diff --git a/server/src/main/java/io/druid/server/AsyncQueryForwardingServlet.java b/server/src/main/java/io/druid/server/AsyncQueryForwardingServlet.java
index 85f33a70007..5d1dc796b6b 100644
--- a/server/src/main/java/io/druid/server/AsyncQueryForwardingServlet.java
+++ b/server/src/main/java/io/druid/server/AsyncQueryForwardingServlet.java
@@ -106,8 +106,6 @@ public class AsyncQueryForwardingServlet extends HttpServlet
}
req.setAttribute(DISPATCHED, true);
- resp.setStatus(200);
- resp.setContentType("application/x-javascript");
query = objectMapper.readValue(req.getInputStream(), Query.class);
queryId = query.getId();
@@ -132,6 +130,9 @@ public class AsyncQueryForwardingServlet extends HttpServlet
@Override
public ClientResponse handleResponse(HttpResponse response)
{
+ resp.setStatus(response.getStatus().getCode());
+ resp.setContentType("application/x-javascript");
+
byte[] bytes = getContentBytes(response.getContent());
if (bytes.length > 0) {
try {
@@ -209,7 +210,7 @@ public class AsyncQueryForwardingServlet extends HttpServlet
@Override
public void run()
{
- routingDruidClient.run(host, theQuery, responseHandler);
+ routingDruidClient.run(makeUrl(host, req), theQuery, responseHandler);
}
}
);
@@ -235,4 +236,14 @@ public class AsyncQueryForwardingServlet extends HttpServlet
.emit();
}
}
+
+ private String makeUrl(String host, HttpServletRequest req)
+ {
+ String queryString = req.getQueryString();
+
+ if (queryString == null) {
+ return String.format("http://%s%s", host, req.getRequestURI());
+ }
+ return String.format("http://%s%s?%s", host, req.getRequestURI(), req.getQueryString());
+ }
}
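
Instead of always posting to a fixed `/druid/v2/` path, the router now mirrors the incoming request's URI and query string onto the chosen host. A standalone sketch of `makeUrl`'s behavior with hypothetical inputs:

```java
public class MakeUrlSketch
{
  // Same logic as AsyncQueryForwardingServlet.makeUrl, minus the servlet request.
  static String makeUrl(String host, String requestUri, String queryString)
  {
    if (queryString == null) {
      return String.format("http://%s%s", host, requestUri);
    }
    return String.format("http://%s%s?%s", host, requestUri, queryString);
  }

  public static void main(String[] args)
  {
    System.out.println(makeUrl("broker01:8080", "/druid/v2/", null));     // http://broker01:8080/druid/v2/
    System.out.println(makeUrl("broker01:8080", "/druid/v2/", "pretty")); // http://broker01:8080/druid/v2/?pretty
  }
}
```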
diff --git a/server/src/main/java/io/druid/server/QueryIDProvider.java b/server/src/main/java/io/druid/server/QueryIDProvider.java
index 9226af1606b..8283a73eff4 100644
--- a/server/src/main/java/io/druid/server/QueryIDProvider.java
+++ b/server/src/main/java/io/druid/server/QueryIDProvider.java
@@ -44,7 +44,7 @@ public class QueryIDProvider
return String.format(
"%s_%s_%s_%s_%s",
query.getDataSource(),
- query.getDuration(),
+ query.getIntervals(),
host,
new DateTime(),
id.incrementAndGet()
diff --git a/server/src/test/java/io/druid/client/CachingClusteredClientTest.java b/server/src/test/java/io/druid/client/CachingClusteredClientTest.java
index 710511c02a4..a2e26f3a532 100644
--- a/server/src/test/java/io/druid/client/CachingClusteredClientTest.java
+++ b/server/src/test/java/io/druid/client/CachingClusteredClientTest.java
@@ -217,6 +217,7 @@ public class CachingClusteredClientTest
.context(CONTEXT);
QueryRunner runner = new FinalizeResultsQueryRunner(client, new TimeseriesQueryQueryToolChest(new QueryConfig()));
+
testQueryCaching(
runner,
builder.build(),
@@ -280,6 +281,7 @@ public class CachingClusteredClientTest
.aggregators(AGGS)
.postAggregators(POST_AGGS)
.context(CONTEXT);
+
QueryRunner runner = new FinalizeResultsQueryRunner(client, new TimeseriesQueryQueryToolChest(new QueryConfig()));
testQueryCaching(
@@ -786,11 +788,11 @@ public class CachingClusteredClientTest
for (Capture queryCapture : queryCaptures) {
Query capturedQuery = (Query) queryCapture.getValue();
if (expectBySegment) {
- Assert.assertEquals("true", capturedQuery.getContextValue("bySegment"));
+ Assert.assertEquals(true, capturedQuery.getContextValue("bySegment"));
} else {
Assert.assertTrue(
capturedQuery.getContextValue("bySegment") == null ||
- capturedQuery.getContextValue("bySegment").equals("false")
+ capturedQuery.getContextValue("bySegment").equals(false)
);
}
}
diff --git a/services/pom.xml b/services/pom.xml
index 2029aed81fe..8c86d9962ba 100644
--- a/services/pom.xml
+++ b/services/pom.xml
@@ -27,7 +27,7 @@
<groupId>io.druid</groupId>
<artifactId>druid</artifactId>
- <version>0.6.83-SNAPSHOT</version>
+ <version>0.6.85-SNAPSHOT</version>
diff --git a/services/src/main/java/io/druid/cli/CliBroker.java b/services/src/main/java/io/druid/cli/CliBroker.java
index 88582af0947..c145a053106 100644
--- a/services/src/main/java/io/druid/cli/CliBroker.java
+++ b/services/src/main/java/io/druid/cli/CliBroker.java
@@ -55,7 +55,7 @@ import java.util.List;
*/
@Command(
name = "broker",
- description = "Runs a broker node, see http://druid.io/docs/0.6.81/Broker.html for a description"
+ description = "Runs a broker node, see http://druid.io/docs/0.6.84/Broker.html for a description"
)
public class CliBroker extends ServerRunnable
{
diff --git a/services/src/main/java/io/druid/cli/CliCoordinator.java b/services/src/main/java/io/druid/cli/CliCoordinator.java
index 2b2b17ec0fd..f04d894f638 100644
--- a/services/src/main/java/io/druid/cli/CliCoordinator.java
+++ b/services/src/main/java/io/druid/cli/CliCoordinator.java
@@ -66,7 +66,7 @@ import java.util.List;
*/
@Command(
name = "coordinator",
- description = "Runs the Coordinator, see http://druid.io/docs/0.6.81/Coordinator.html for a description."
+ description = "Runs the Coordinator, see http://druid.io/docs/0.6.84/Coordinator.html for a description."
)
public class CliCoordinator extends ServerRunnable
{
diff --git a/services/src/main/java/io/druid/cli/CliHadoopIndexer.java b/services/src/main/java/io/druid/cli/CliHadoopIndexer.java
index 838d001a714..8532215521a 100644
--- a/services/src/main/java/io/druid/cli/CliHadoopIndexer.java
+++ b/services/src/main/java/io/druid/cli/CliHadoopIndexer.java
@@ -41,7 +41,7 @@ import java.util.List;
*/
@Command(
name = "hadoop",
- description = "Runs the batch Hadoop Druid Indexer, see http://druid.io/docs/0.6.81/Batch-ingestion.html for a description."
+ description = "Runs the batch Hadoop Druid Indexer, see http://druid.io/docs/0.6.84/Batch-ingestion.html for a description."
)
public class CliHadoopIndexer implements Runnable
{
diff --git a/services/src/main/java/io/druid/cli/CliHistorical.java b/services/src/main/java/io/druid/cli/CliHistorical.java
index 3a41e35e22c..941d31521d5 100644
--- a/services/src/main/java/io/druid/cli/CliHistorical.java
+++ b/services/src/main/java/io/druid/cli/CliHistorical.java
@@ -46,7 +46,7 @@ import java.util.List;
*/
@Command(
name = "historical",
- description = "Runs a Historical node, see http://druid.io/docs/0.6.81/Historical.html for a description"
+ description = "Runs a Historical node, see http://druid.io/docs/0.6.84/Historical.html for a description"
)
public class CliHistorical extends ServerRunnable
{
diff --git a/services/src/main/java/io/druid/cli/CliOverlord.java b/services/src/main/java/io/druid/cli/CliOverlord.java
index e7208224033..f2c6221a723 100644
--- a/services/src/main/java/io/druid/cli/CliOverlord.java
+++ b/services/src/main/java/io/druid/cli/CliOverlord.java
@@ -93,7 +93,7 @@ import java.util.List;
*/
@Command(
name = "overlord",
- description = "Runs an Overlord node, see http://druid.io/docs/0.6.81/Indexing-Service.html for a description"
+ description = "Runs an Overlord node, see http://druid.io/docs/0.6.84/Indexing-Service.html for a description"
)
public class CliOverlord extends ServerRunnable
{
diff --git a/services/src/main/java/io/druid/cli/CliRealtime.java b/services/src/main/java/io/druid/cli/CliRealtime.java
index fcf55932d49..012ee37b31c 100644
--- a/services/src/main/java/io/druid/cli/CliRealtime.java
+++ b/services/src/main/java/io/druid/cli/CliRealtime.java
@@ -30,7 +30,7 @@ import java.util.List;
*/
@Command(
name = "realtime",
- description = "Runs a realtime node, see http://druid.io/docs/0.6.81/Realtime.html for a description"
+ description = "Runs a realtime node, see http://druid.io/docs/0.6.84/Realtime.html for a description"
)
public class CliRealtime extends ServerRunnable
{