mirror of https://github.com/apache/druid.git
prepare for next release
This commit is contained in:
parent
6eb1102f95
commit
a58f90a689
build.sh | 2
@@ -30,4 +30,4 @@ echo "For examples, see: "
 echo " "
 ls -1 examples/*/*sh
 echo " "
-echo "See also http://druid.io/docs/0.6.30"
+echo "See also http://druid.io/docs/0.6.31"
@@ -3,7 +3,7 @@ layout: doc_page
 ---
 # Booting a Single Node Cluster #
 
-[Loading Your Data](Tutorial%3A-Loading-Your-Data-Part-2.html) and [All About Queries](Tutorial%3A-All-About-Queries.html) contain recipes to boot a small druid cluster on localhost. Here we will boot a small cluster on EC2. You can checkout the code, or download a tarball from [here](http://static.druid.io/artifacts/druid-services-0.6.30-bin.tar.gz).
+[Loading Your Data](Tutorial%3A-Loading-Your-Data-Part-2.html) and [All About Queries](Tutorial%3A-All-About-Queries.html) contain recipes to boot a small druid cluster on localhost. Here we will boot a small cluster on EC2. You can checkout the code, or download a tarball from [here](http://static.druid.io/artifacts/druid-services-0.6.31-bin.tar.gz).
 
 The [ec2 run script](https://github.com/metamx/druid/blob/master/examples/bin/run_ec2.sh), run_ec2.sh, is located at 'examples/bin' if you have checked out the code, or at the root of the project if you've downloaded a tarball. The scripts rely on the [Amazon EC2 API Tools](http://aws.amazon.com/developertools/351), and you will need to set three environment variables:
 
@@ -19,13 +19,13 @@ Clone Druid and build it:
 git clone https://github.com/metamx/druid.git druid
 cd druid
 git fetch --tags
-git checkout druid-0.6.30
+git checkout druid-0.6.31
 ./build.sh
 ```
 
 ### Downloading the DSK (Druid Standalone Kit)
 
-[Download](http://static.druid.io/artifacts/releases/druid-services-0.6.30-bin.tar.gz) a stand-alone tarball and run it:
+[Download](http://static.druid.io/artifacts/releases/druid-services-0.6.31-bin.tar.gz) a stand-alone tarball and run it:
 
 ``` bash
 tar -xzf druid-services-0.X.X-bin.tar.gz
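For reference, a minimal sketch of fetching and unpacking the 0.6.31 standalone tarball named in this hunk (assumes `wget` is available; the extracted directory name matches the `cd druid-services-0.6.31` step that appears later in these docs):

```bash
# Fetch the release tarball referenced by the updated docs and unpack it.
wget http://static.druid.io/artifacts/releases/druid-services-0.6.31-bin.tar.gz
tar -xzf druid-services-0.6.31-bin.tar.gz
cd druid-services-0.6.31
```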
@@ -27,7 +27,7 @@ druid.host=localhost
 druid.service=realtime
 druid.port=8083
 
-druid.extensions.coordinates=["io.druid.extensions:druid-kafka-seven:0.6.30"]
+druid.extensions.coordinates=["io.druid.extensions:druid-kafka-seven:0.6.31"]
 
 
 druid.zk.service.host=localhost
@@ -49,7 +49,7 @@ There are two ways to setup Druid: download a tarball, or [Build From Source](Bu
 
 ### Download a Tarball
 
-We've built a tarball that contains everything you'll need. You'll find it [here](http://static.druid.io/artifacts/releases/druid-services-0.6.30-bin.tar.gz). Download this file to a directory of your choosing.
+We've built a tarball that contains everything you'll need. You'll find it [here](http://static.druid.io/artifacts/releases/druid-services-0.6.31-bin.tar.gz). Download this file to a directory of your choosing.
 
 You can extract the awesomeness within by issuing:
 
@@ -60,7 +60,7 @@ tar -zxvf druid-services-*-bin.tar.gz
 Not too lost so far right? That's great! If you cd into the directory:
 
 ```
-cd druid-services-0.6.30
+cd druid-services-0.6.31
 ```
 
 You should see a bunch of files:
@@ -246,6 +246,21 @@ Issuing a [TimeBoundaryQuery](TimeBoundaryQuery.html) should yield:
 } ]
 ```
 
+Problems?
+---------
+
+If you decide to reuse the local firehose to ingest your own data and if you run into problems, you can read the individual task logs at:
+
+```bash
+<Current working directory>/log/<task_id>.log
+```
+
+One thing to note is that the log file will only exist once the task completes with either SUCCESS or FAILURE.
+
+Task logs can be stored locally or uploaded to [Deep Storage](Deep-Storage.html). More information about how to configure this is [here](Configuration.html).
+
+Most common data ingestion problems are around timestamp formats and other malformed data issues.
+
 Next Steps
 ----------
 
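As a usage sketch for the task-log path added above: a log appears only after its task ends in SUCCESS or FAILURE, so a quick check from the working directory might look like this:

```bash
# List whatever task logs have been written so far
# (nothing shows up until a task has finished in SUCCESS or FAILURE).
ls -1 log/
# Show the tail of every task log; substitute a concrete <task_id>.log to inspect one task.
tail -n 50 log/*.log
```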
@@ -44,7 +44,7 @@ With real-world data, we recommend having a message bus such as [Apache Kafka](h
 
 #### Setting up Kafka
 
-[KafkaFirehoseFactory](https://github.com/metamx/druid/blob/druid-0.6.30/realtime/src/main/java/com/metamx/druid/realtime/firehose/KafkaFirehoseFactory.java) is how druid communicates with Kafka. Using this [Firehose](Firehose.html) with the right configuration, we can import data into Druid in real-time without writing any code. To load data to a real-time node via Kafka, we'll first need to initialize Zookeeper and Kafka, and then configure and initialize a [Realtime](Realtime.html) node.
+[KafkaFirehoseFactory](https://github.com/metamx/druid/blob/druid-0.6.31/realtime/src/main/java/com/metamx/druid/realtime/firehose/KafkaFirehoseFactory.java) is how druid communicates with Kafka. Using this [Firehose](Firehose.html) with the right configuration, we can import data into Druid in real-time without writing any code. To load data to a real-time node via Kafka, we'll first need to initialize Zookeeper and Kafka, and then configure and initialize a [Realtime](Realtime.html) node.
 
 Instructions for booting a Zookeeper and then Kafka cluster are available [here](http://kafka.apache.org/07/quickstart.html).
 
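The Kafka quickstart linked in this hunk boots Zookeeper and a broker with the scripts shipped in the Kafka distribution; a minimal sketch, assuming the commands are run from the unpacked Kafka directory:

```bash
# Start a local Zookeeper, then a single Kafka broker, each in its own terminal.
bin/zookeeper-server-start.sh config/zookeeper.properties
bin/kafka-server-start.sh config/server.properties
```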
@@ -13,7 +13,7 @@ In this tutorial, we will set up other types of Druid nodes as well as and exter
 
 If you followed the first tutorial, you should already have Druid downloaded. If not, let's go back and do that first.
 
-You can download the latest version of druid [here](http://static.druid.io/artifacts/releases/druid-services-0.6.30-bin.tar.gz)
+You can download the latest version of druid [here](http://static.druid.io/artifacts/releases/druid-services-0.6.31-bin.tar.gz)
 
 and untar the contents within by issuing:
 
@@ -149,7 +149,7 @@ druid.port=8081
 
 druid.zk.service.host=localhost
 
-druid.extensions.coordinates=["io.druid.extensions:druid-s3-extensions:0.6.30"]
+druid.extensions.coordinates=["io.druid.extensions:druid-s3-extensions:0.6.31"]
 
 # Dummy read only AWS account (used to download example data)
 druid.s3.secretKey=QyyfVZ7llSiRg6Qcrql1eEUG7buFpAK6T6engr1b
@@ -238,7 +238,7 @@ druid.port=8083
 
 druid.zk.service.host=localhost
 
-druid.extensions.coordinates=["io.druid.extensions:druid-examples:0.6.30","io.druid.extensions:druid-kafka-seven:0.6.30"]
+druid.extensions.coordinates=["io.druid.extensions:druid-examples:0.6.31","io.druid.extensions:druid-kafka-seven:0.6.31"]
 
 # Change this config to db to hand off to the rest of the Druid cluster
 druid.publish.type=noop
@@ -37,7 +37,7 @@ There are two ways to setup Druid: download a tarball, or [Build From Source](Bu
 
 h3. Download a Tarball
 
-We've built a tarball that contains everything you'll need. You'll find it [here](http://static.druid.io/artifacts/releases/druid-services-0.6.30-bin.tar.gz)
+We've built a tarball that contains everything you'll need. You'll find it [here](http://static.druid.io/artifacts/releases/druid-services-0.6.31-bin.tar.gz)
 Download this file to a directory of your choosing.
 You can extract the awesomeness within by issuing:
 
@@ -48,7 +48,7 @@ tar zxvf druid-services-*-bin.tar.gz
 Not too lost so far right? That's great! If you cd into the directory:
 
 ```
-cd druid-services-0.6.30
+cd druid-services-0.6.31
 ```
 
 You should see a bunch of files:
@@ -9,7 +9,7 @@ There are two ways to setup Druid: download a tarball, or build it from source.
 
 h3. Download a Tarball
 
-We've built a tarball that contains everything you'll need. You'll find it "here":http://static.druid.io/artifacts/releases/druid-services-0.6.30-bin.tar.gz.
+We've built a tarball that contains everything you'll need. You'll find it "here":http://static.druid.io/artifacts/releases/druid-services-0.6.31-bin.tar.gz.
 Download this bad boy to a directory of your choosing.
 
 You can extract the awesomeness within by issuing:
@@ -15,9 +15,9 @@ h2. Getting Started
 * "Tutorial: All About Queries":./Tutorial:-All-About-Queries.html
 
 h2. Operations
-* "Cluster Setup":./Cluster-setup.html
 * "Configuration":Configuration.html
 * "Extending Druid":./Modules.html
+* "Cluster Setup":./Cluster-setup.html
 * "Booting a Production Cluster":./Booting-a-production-cluster.html
 
 h2. Data Ingestion
@@ -4,7 +4,7 @@ druid.port=8081
 
 druid.zk.service.host=localhost
 
-druid.extensions.coordinates=["io.druid.extensions:druid-s3-extensions:0.6.30"]
+druid.extensions.coordinates=["io.druid.extensions:druid-s3-extensions:0.6.31"]
 
 # Dummy read only AWS account (used to download example data)
 druid.s3.secretKey=QyyfVZ7llSiRg6Qcrql1eEUG7buFpAK6T6engr1b
@@ -4,7 +4,7 @@ druid.port=8083
 
 druid.zk.service.host=localhost
 
-druid.extensions.coordinates=["io.druid.extensions:druid-examples:0.6.30","io.druid.extensions:druid-kafka-seven:0.6.30","io.druid.extensions:druid-rabbitmq:0.6.30"]
+druid.extensions.coordinates=["io.druid.extensions:druid-examples:0.6.31","io.druid.extensions:druid-kafka-seven:0.6.31","io.druid.extensions:druid-rabbitmq:0.6.31"]
 
 # Change this config to db to hand off to the rest of the Druid cluster
 druid.publish.type=noop
@@ -18,7 +18,8 @@
   ~ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
   -->
 
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
     <modelVersion>4.0.0</modelVersion>
     <groupId>io.druid.extensions</groupId>
     <artifactId>druid-kafka-eight</artifactId>
@@ -32,8 +33,6 @@
     </parent>
 
     <dependencies>
-
-
         <dependency>
             <groupId>io.druid</groupId>
             <artifactId>druid-api</artifactId>
@@ -54,61 +53,6 @@
             </exclusions>
         </dependency>
 
-        <!-- kafka_2.9.2 0.8.0-beta1 is bad, it is not correctly pulling dependencies, do it manually here -->
-        <dependency>
-            <groupId>org.scala-lang</groupId>
-            <artifactId>scala-library</artifactId>
-            <version>2.9.2</version>
-        </dependency>
-        <dependency>
-            <groupId>net.sf.jopt-simple</groupId>
-            <artifactId>jopt-simple</artifactId>
-            <version>3.2</version>
-        </dependency>
-        <dependency>
-            <groupId>org.slf4j</groupId>
-            <artifactId>slf4j-simple</artifactId>
-            <version>1.6.4</version>
-        </dependency>
-        <dependency>
-            <groupId>org.scala-lang</groupId>
-            <artifactId>scala-compiler</artifactId>
-            <version>2.9.2</version>
-        </dependency>
-        <dependency>
-            <groupId>com.101tec</groupId>
-            <artifactId>zkclient</artifactId>
-            <version>0.3</version>
-        </dependency>
-        <dependency>
-            <groupId>org.xerial.snappy</groupId>
-            <artifactId>snappy-java</artifactId>
-            <version>1.0.4.1</version>
-        </dependency>
-        <dependency>
-            <groupId>com.yammer.metrics</groupId>
-            <artifactId>metrics-core</artifactId>
-            <version>2.2.0</version>
-        </dependency>
-        <dependency>
-            <groupId>com.yammer.metrics</groupId>
-            <artifactId>metrics-annotation</artifactId>
-            <version>2.2.0</version>
-        </dependency>
-        <dependency>
-            <groupId>org.easymock</groupId>
-            <artifactId>easymock</artifactId>
-            <version>3.0</version>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.scalatest</groupId>
-            <artifactId>scalatest_2.9.2</artifactId>
-            <version>1.8</version>
-            <scope>test</scope>
-        </dependency>
-
-
         <!-- Tests -->
         <dependency>
             <groupId>junit</groupId>
@@ -53,7 +53,7 @@ import java.util.List;
  */
 @Command(
     name = "broker",
-    description = "Runs a broker node, see http://druid.io/docs/0.6.30/Broker.html for a description"
+    description = "Runs a broker node, see http://druid.io/docs/0.6.31/Broker.html for a description"
 )
 public class CliBroker extends ServerRunnable
 {
@@ -63,7 +63,7 @@ import java.util.List;
  */
 @Command(
     name = "coordinator",
-    description = "Runs the Coordinator, see http://druid.io/docs/0.6.30/Coordinator.html for a description."
+    description = "Runs the Coordinator, see http://druid.io/docs/0.6.31/Coordinator.html for a description."
 )
 public class CliCoordinator extends ServerRunnable
 {
@@ -41,7 +41,7 @@ import java.util.List;
  */
 @Command(
     name = "hadoop",
-    description = "Runs the batch Hadoop Druid Indexer, see http://druid.io/docs/0.6.30/Batch-ingestion.html for a description."
+    description = "Runs the batch Hadoop Druid Indexer, see http://druid.io/docs/0.6.31/Batch-ingestion.html for a description."
 )
 public class CliHadoopIndexer implements Runnable
 {
@@ -42,7 +42,7 @@ import java.util.List;
  */
 @Command(
     name = "historical",
-    description = "Runs a Historical node, see http://druid.io/docs/0.6.30/Historical.html for a description"
+    description = "Runs a Historical node, see http://druid.io/docs/0.6.31/Historical.html for a description"
 )
 public class CliHistorical extends ServerRunnable
 {
@@ -95,7 +95,7 @@ import java.util.List;
  */
 @Command(
     name = "overlord",
-    description = "Runs an Overlord node, see http://druid.io/docs/0.6.30/Indexing-Service.html for a description"
+    description = "Runs an Overlord node, see http://druid.io/docs/0.6.31/Indexing-Service.html for a description"
 )
 public class CliOverlord extends ServerRunnable
 {
@@ -30,7 +30,7 @@ import java.util.List;
  */
 @Command(
     name = "realtime",
-    description = "Runs a realtime node, see http://druid.io/docs/0.6.30/Realtime.html for a description"
+    description = "Runs a realtime node, see http://druid.io/docs/0.6.31/Realtime.html for a description"
 )
 public class CliRealtime extends ServerRunnable
 {
@@ -42,7 +42,7 @@ import java.util.concurrent.Executor;
  */
 @Command(
     name = "realtime",
-    description = "Runs a standalone realtime node for examples, see http://druid.io/docs/0.6.30/Realtime.html for a description"
+    description = "Runs a standalone realtime node for examples, see http://druid.io/docs/0.6.31/Realtime.html for a description"
 )
 public class CliRealtimeExample extends ServerRunnable
 {
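The `name` values in these `@Command` annotations are the sub-commands used to launch each node type; a sketch of how such an invocation typically looks in the 0.6.x docs (the classpath entries here are placeholders, not taken from this commit):

```bash
# Launch a realtime node via the "realtime" command declared above;
# lib/* and config/realtime are placeholder classpath entries.
java -Duser.timezone=UTC -Dfile.encoding=UTF-8 \
  -classpath "lib/*:config/realtime" \
  io.druid.cli.Main server realtime
```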