From df7a39a682e60326a8db7542da54d8f2308e5df1 Mon Sep 17 00:00:00 2001
From: Xavier Léauté
Date: Mon, 28 Jul 2014 09:59:21 -0700
Subject: [PATCH 1/3] fix index out of bounds

---
 .../java/io/druid/segment/QueryableIndexStorageAdapter.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/processing/src/main/java/io/druid/segment/QueryableIndexStorageAdapter.java b/processing/src/main/java/io/druid/segment/QueryableIndexStorageAdapter.java
index 7eb83b70f02..9dcd136e024 100644
--- a/processing/src/main/java/io/druid/segment/QueryableIndexStorageAdapter.java
+++ b/processing/src/main/java/io/druid/segment/QueryableIndexStorageAdapter.java
@@ -508,7 +508,7 @@ public class QueryableIndexStorageAdapter implements StorageAdapter
           if (multiValueRow.size() == 0) {
             return null;
           } else if (multiValueRow.size() == 1) {
-            return columnVals.lookupName(multiValueRow.get(1));
+            return columnVals.lookupName(multiValueRow.get(0));
           } else {
             final String[] strings = new String[multiValueRow.size()];
             for (int i = 0 ; i < multiValueRow.size() ; i++) {

From 61af84a4a151582683efd326989844621810c6d9 Mon Sep 17 00:00:00 2001
From: Jasmine Hegman
Date: Mon, 28 Jul 2014 14:44:59 -0700
Subject: [PATCH 2/3] Cleaning up code sample indentations

---
 .../Tutorial:-Loading-Your-Data-Part-2.md | 66 +++++++++++--------
 1 file changed, 39 insertions(+), 27 deletions(-)

diff --git a/docs/content/Tutorial:-Loading-Your-Data-Part-2.md b/docs/content/Tutorial:-Loading-Your-Data-Part-2.md
index c6833454187..71f993ca5b9 100644
--- a/docs/content/Tutorial:-Loading-Your-Data-Part-2.md
+++ b/docs/content/Tutorial:-Loading-Your-Data-Part-2.md
@@ -109,22 +109,27 @@ You should be comfortable starting Druid nodes at this point. If not, it may be
 {
   "schema": {
     "dataSource": "wikipedia",
-    "aggregators" : [{
-      "type" : "count",
-      "name" : "count"
-    }, {
-      "type" : "doubleSum",
-      "name" : "added",
-      "fieldName" : "added"
-    }, {
-      "type" : "doubleSum",
-      "name" : "deleted",
-      "fieldName" : "deleted"
-    }, {
-      "type" : "doubleSum",
-      "name" : "delta",
-      "fieldName" : "delta"
-    }],
+    "aggregators" : [
+      {
+        "type" : "count",
+        "name" : "count"
+      },
+      {
+        "type" : "doubleSum",
+        "name" : "added",
+        "fieldName" : "added"
+      },
+      {
+        "type" : "doubleSum",
+        "name" : "deleted",
+        "fieldName" : "deleted"
+      },
+      {
+        "type" : "doubleSum",
+        "name" : "delta",
+        "fieldName" : "delta"
+      }
+    ],
     "indexGranularity": "none"
   },
   "config": {
@@ -196,13 +201,15 @@ Note: This config uses a "test" [rejection policy](Plumber.html) which will acce
 Issuing a [TimeBoundaryQuery](TimeBoundaryQuery.html) to the real-time node should yield valid results:
 
 ```json
-[ {
-  "timestamp" : "2013-08-31T01:02:33.000Z",
-  "result" : {
-    "minTime" : "2013-08-31T01:02:33.000Z",
-    "maxTime" : "2013-08-31T12:41:27.000Z"
-  }
-} ]
+[
+  {
+    "timestamp" : "2013-08-31T01:02:33.000Z",
+    "result" : {
+      "minTime" : "2013-08-31T01:02:33.000Z",
+      "maxTime" : "2013-08-31T12:41:27.000Z"
+    }
+  }
+]
 ```
 
 Batch Ingestion
@@ -287,22 +294,27 @@ Examining the contents of the file, you should find:
   },
   "targetPartitionSize" : 5000000,
   "rollupSpec" : {
-    "aggs": [{
-      "type" : "count",
-      "name" : "count"
-    }, {
-      "type" : "doubleSum",
-      "name" : "added",
-      "fieldName" : "added"
-    }, {
-      "type" : "doubleSum",
-      "name" : "deleted",
-      "fieldName" : "deleted"
-    }, {
-      "type" : "doubleSum",
-      "name" : "delta",
-      "fieldName" : "delta"
-    }],
+    "aggs": [
+      {
+        "type" : "count",
+        "name" : "count"
+      },
+      {
+        "type" : "doubleSum",
+        "name" : "added",
+        "fieldName" : "added"
+      },
+      {
+        "type" : "doubleSum",
+        "name" : "deleted",
+        "fieldName" : "deleted"
+      },
+      {
+        "type" : "doubleSum",
+        "name" : "delta",
+        "fieldName" : "delta"
+      }
+    ],
     "rollupGranularity" : "none"
   }
 }
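The one-character change in PATCH 1/3 fixes an IndexOutOfBoundsException on rows of a multi-value dimension that hold exactly one value: the row's only dictionary id lives at index 0, so looking up index 1 walks off the end of the row. The sketch below illustrates that branch in isolation; the `List`-based row and dictionary are stand-ins for Druid's internal types, not the actual `QueryableIndexStorageAdapter` code.

```java
import java.util.Arrays;
import java.util.List;

public class SingleValueRowLookup
{
  // Stand-in for the column's value dictionary consulted by columnVals.lookupName(id).
  private static final List<String> DICTIONARY = Arrays.asList("australia", "canada", "wikipedia");

  // Mirrors the patched branch: a multi-value row that holds exactly one dictionary id.
  public static String lookupSingleValue(List<Integer> multiValueRow)
  {
    if (multiValueRow.isEmpty()) {
      return null;
    } else if (multiValueRow.size() == 1) {
      // multiValueRow.get(1) would throw IndexOutOfBoundsException here;
      // the single element is at index 0.
      return DICTIONARY.get(multiValueRow.get(0));
    }
    throw new IllegalArgumentException("multi-value case omitted from this sketch");
  }

  public static void main(String[] args)
  {
    // A single-valued row referencing dictionary id 2 prints "wikipedia".
    System.out.println(lookupSingleValue(Arrays.asList(2)));
  }
}
```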
From 1844bf4f087f67644fcf2bf00a275b34e63819ba Mon Sep 17 00:00:00 2001
From: Jasmine Hegman
Date: Mon, 28 Jul 2014 14:46:39 -0700
Subject: [PATCH 3/3] Adding a section to detail supporting Kafka 8

---
 .../Tutorial:-Loading-Your-Data-Part-2.md | 57 +++++++++++++++++++
 1 file changed, 57 insertions(+)

diff --git a/docs/content/Tutorial:-Loading-Your-Data-Part-2.md b/docs/content/Tutorial:-Loading-Your-Data-Part-2.md
index 71f993ca5b9..d0fc6a8fe45 100644
--- a/docs/content/Tutorial:-Loading-Your-Data-Part-2.md
+++ b/docs/content/Tutorial:-Loading-Your-Data-Part-2.md
@@ -341,3 +341,60 @@
 Additional Information
 ----------------------
 Getting data into Druid can definitely be difficult for first time users. Please don't hesitate to ask questions in our IRC channel or on our [google groups page](https://groups.google.com/forum/#!forum/druid-development).
+
+
+Further Reading
+---------------------
+
+Ingesting from Kafka 8
+---------------------------------
+
+
+Continuing from the Kafka 7 examples, a couple of changes need to be made to support Kafka 8:
+
+- Update the realtime node's config for the Kafka 8 extension
+  - e.g.
+    - `druid.extensions.coordinates=[...,"io.druid.extensions:druid-kafka-seven:0.6.121",...]`
+    - becomes
+    - `druid.extensions.coordinates=[...,"io.druid.extensions:druid-kafka-eight:0.6.121",...]`
+- Update the realtime task config for the changed keys
+  - `firehose.type`, `plumber.rejectionPolicyFactory`, and all of the `firehose.consumerProps` settings change.
+
+```json
+
+  "firehose" : {
+    "type" : "kafka-0.8",
+    "consumerProps" : {
+      "zookeeper.connect": "localhost:2181",
+      "zookeeper.connection.timeout.ms": "15000",
+      "zookeeper.session.timeout.ms": "15000",
+      "zookeeper.sync.time.ms": "5000",
+      "group.id": "topic-pixel-local",
+      "fetch.message.max.bytes": "1048586",
+      "auto.offset.reset": "largest",
+      "auto.commit.enable": "false"
+    },
+    "feed" : "druidtest",
+    "parser" : {
+      "timestampSpec" : {
+        "column" : "utcdt",
+        "format" : "iso"
+      },
+      "data" : {
+        "format" : "json"
+      },
+      "dimensionExclusions" : [
+        "wp"
+      ]
+    }
+  },
+  "plumber" : {
+    "type" : "realtime",
+    "windowPeriod" : "PT10m",
+    "segmentGranularity" : "hour",
+    "basePersistDirectory" : "/tmp/realtime/basePersist",
+    "rejectionPolicyFactory": {
+      "type": "messageTime"
+    }
+  }
+```
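The `consumerProps` block above is standard Kafka 0.8 high-level consumer configuration that the `kafka-0.8` firehose hands to the Kafka client. Before pointing the realtime node at the topic, it can help to confirm that events are actually arriving on `druidtest` with a bare consumer wired to the same properties. The sketch below assumes the Kafka 0.8.x `kafka.consumer` / `kafka.javaapi.consumer` API and the local ZooKeeper from the config above; it is illustrative only and not part of this patch series.

```java
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Properties;

import kafka.consumer.Consumer;
import kafka.consumer.ConsumerConfig;
import kafka.consumer.ConsumerIterator;
import kafka.consumer.KafkaStream;
import kafka.javaapi.consumer.ConsumerConnector;

public class KafkaEightTopicCheck
{
  public static void main(String[] args)
  {
    // Same settings as the "consumerProps" block in the firehose config.
    Properties props = new Properties();
    props.put("zookeeper.connect", "localhost:2181");
    props.put("zookeeper.connection.timeout.ms", "15000");
    props.put("group.id", "topic-pixel-local");
    props.put("auto.offset.reset", "largest");
    props.put("auto.commit.enable", "false");

    // Kafka 0.8 high-level consumer: open one stream on the tutorial's "druidtest" feed.
    ConsumerConnector connector = Consumer.createJavaConsumerConnector(new ConsumerConfig(props));
    Map<String, List<KafkaStream<byte[], byte[]>>> streams =
        connector.createMessageStreams(Collections.singletonMap("druidtest", 1));

    // Print each event until interrupted; every line should be one JSON object
    // carrying the "utcdt" timestamp column the parser expects.
    ConsumerIterator<byte[], byte[]> it = streams.get("druidtest").get(0).iterator();
    while (it.hasNext()) {
      System.out.println(new String(it.next().message()));
    }
  }
}
```

If nothing prints while test events are being posted to Kafka, the problem is upstream of Druid (topic name, ZooKeeper address, or the producer), which narrows down where to look before debugging the firehose itself.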