add docs + fix tests

nishantmonu51 2014-12-11 17:58:01 +05:30
parent 7ad03087c0
commit a0d3579a92
3 changed files with 39 additions and 1 deletion

View File

@@ -0,0 +1,31 @@
---
layout: doc_page
---
# Data Source Metadata Queries
Data Source Metadata queries return ingestion-related metadata for a dataSource: specifically, the timestamp of the latest event ingested into that dataSource. The grammar is:
```json
{
"queryType" : "dataSourceMetadata",
"dataSource": "sample_datasource",
}
```
There are three main parts to a Data Source Metadata query:

|property|description|required?|
|--------|-----------|---------|
|queryType|This String should always be "dataSourceMetadata"; this is the first thing Druid looks at to figure out how to interpret the query.|yes|
|dataSource|A String defining the data source to query, very similar to a table in a relational database|yes|
|context|An additional JSON Object which can be used to specify certain flags.|no|
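
For example, the optional `context` object can be used to pass query flags; a query using it might look like the following (the specific keys shown, such as `timeout`, are illustrative):

```json
{
    "queryType" : "dataSourceMetadata",
    "dataSource": "sample_datasource",
    "context" : {
        "timeout" : 60000
    }
}
```
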
The format of the result is:
```json
[ {
"timestamp" : "2013-05-09T18:24:00.000Z",
"result" : {
"maxIngestedEventTime" : "2013-05-09T18:24:09.007Z",
}
} ]
```
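
The test changes below exercise this query from Java. For reference, a minimal sketch of constructing the same query programmatically, assuming a `Druids.newDataSourceMetadataQueryBuilder()` builder analogous to the other query builders on the `Druids` class imported in the test:

```java
import io.druid.query.Druids;
import io.druid.query.datasourcemetadata.DataSourceMetadataQuery;

public class DataSourceMetadataQueryExample
{
  public static void main(String[] args)
  {
    // Equivalent of the JSON grammar above; the builder name is an
    // assumption, modeled on Druids' other query builders.
    DataSourceMetadataQuery query = Druids.newDataSourceMetadataQueryBuilder()
        .dataSource("sample_datasource")
        .build();

    System.out.println(query.getType()); // prints "dataSourceMetadata"
  }
}
```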

View File

@@ -52,6 +52,7 @@ h2. Querying
 ** "Granularities":./Granularities.html
 ** "DimensionSpecs":./DimensionSpecs.html
 * Query Types
+** "DataSourceMetadataQuery":./DataSourceMetadataQuery.html
 ** "GroupBy":./GroupByQuery.html
 *** "LimitSpec":./LimitSpec.html
 *** "Having":./Having.html

View File

@@ -28,6 +28,7 @@ import com.google.common.collect.Lists;
 import com.google.common.collect.MapMaker;
 import com.metamx.common.guava.Sequences;
 import io.druid.data.input.MapBasedInputRow;
+import io.druid.granularity.QueryGranularity;
 import io.druid.jackson.DefaultObjectMapper;
 import io.druid.query.Druids;
 import io.druid.query.Query;
@@ -35,10 +36,13 @@ import io.druid.query.QueryRunner;
 import io.druid.query.QueryRunnerFactory;
 import io.druid.query.QueryRunnerTestHelper;
 import io.druid.query.Result;
+import io.druid.query.aggregation.AggregatorFactory;
+import io.druid.query.aggregation.CountAggregatorFactory;
 import io.druid.query.timeboundary.TimeBoundaryQueryQueryToolChest;
 import io.druid.segment.IncrementalIndexSegment;
 import io.druid.segment.TestIndex;
 import io.druid.segment.incremental.IncrementalIndex;
+import io.druid.segment.incremental.OnheapIncrementalIndex;
 import io.druid.timeline.LogicalSegment;
 import org.joda.time.DateTime;
 import org.joda.time.Interval;
@@ -107,7 +111,9 @@ public class DataSourceMetadataQueryTest
   @Test
   public void testMaxIngestedEventTime() throws Exception
   {
-    final IncrementalIndex rtIndex = TestIndex.getIncrementalTestIndex(false);
+    final IncrementalIndex rtIndex = new OnheapIncrementalIndex(
+        0L, QueryGranularity.NONE, new AggregatorFactory[]{new CountAggregatorFactory("count")}, 1000
+    );
     final QueryRunner runner = QueryRunnerTestHelper.makeQueryRunner(
         (QueryRunnerFactory) new DataSourceMetadataQueryRunnerFactory(
             QueryRunnerTestHelper.NOOP_QUERYWATCHER