Ingest metadata query implementation

nishantmonu51 2014-12-10 19:44:00 +05:30
parent 89afd83116
commit 3763357f6e
13 changed files with 869 additions and 2 deletions

View File: io.druid.query.Druids

@@ -29,6 +29,7 @@ import io.druid.query.filter.NoopDimFilter;
import io.druid.query.filter.NotDimFilter;
import io.druid.query.filter.OrDimFilter;
import io.druid.query.filter.SelectorDimFilter;
import io.druid.query.ingestmetadata.IngestMetadataQuery;
import io.druid.query.metadata.metadata.ColumnIncluderator;
import io.druid.query.metadata.metadata.SegmentMetadataQuery;
import io.druid.query.search.SearchResultValue;
@@ -1099,4 +1100,90 @@ public class Druids
{
return new SelectQueryBuilder();
}
/**
* A Builder for IngestMetadataQuery.
*
* Required: dataSource() must be called before build()
*
* Usage example:
* <pre><code>
* IngestMetadataQuery query = new IngestMetadataQueryBuilder()
* .dataSource("Example")
* .build();
* </code></pre>
*
* @see io.druid.query.ingestmetadata.IngestMetadataQuery
*/
public static class IngestMetadataQueryBuilder
{
private DataSource dataSource;
private QuerySegmentSpec querySegmentSpec;
private Map<String, Object> context;
public IngestMetadataQueryBuilder()
{
dataSource = null;
querySegmentSpec = null;
context = null;
}
public IngestMetadataQuery build()
{
return new IngestMetadataQuery(
dataSource,
querySegmentSpec,
context
);
}
public IngestMetadataQueryBuilder copy(IngestMetadataQueryBuilder builder)
{
return new IngestMetadataQueryBuilder()
.dataSource(builder.dataSource)
.intervals(builder.querySegmentSpec)
.context(builder.context);
}
public IngestMetadataQueryBuilder dataSource(String ds)
{
dataSource = new TableDataSource(ds);
return this;
}
public IngestMetadataQueryBuilder dataSource(DataSource ds)
{
dataSource = ds;
return this;
}
public IngestMetadataQueryBuilder intervals(QuerySegmentSpec q)
{
querySegmentSpec = q;
return this;
}
public IngestMetadataQueryBuilder intervals(String s)
{
querySegmentSpec = new LegacySegmentSpec(s);
return this;
}
public IngestMetadataQueryBuilder intervals(List<Interval> l)
{
querySegmentSpec = new LegacySegmentSpec(l);
return this;
}
public IngestMetadataQueryBuilder context(Map<String, Object> c)
{
context = c;
return this;
}
}
public static IngestMetadataQueryBuilder newIngestMetadataQueryBuilder()
{
return new IngestMetadataQueryBuilder();
}
}
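For orientation, a minimal sketch of using the new builder, mirroring testQuerySerialization in the IngestMetadataQueryTest added below (the datasource name and interval are hypothetical; fragment assumes this file's imports plus io.druid.jackson.DefaultObjectMapper):

    // Sketch: build the new query type and round-trip it through Jackson.
    IngestMetadataQuery query = Druids.newIngestMetadataQueryBuilder()
        .dataSource("example_datasource")   // hypothetical datasource
        .intervals("2014-01-01/2015-01-01") // optional; omitted, it defaults to a very wide interval
        .build();
    ObjectMapper mapper = new DefaultObjectMapper();
    Query serdeQuery = mapper.readValue(mapper.writeValueAsString(query), Query.class);
    // serdeQuery.equals(query) is exactly what the serde test below asserts.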

View File: io.druid.query.Query

@@ -23,6 +23,7 @@ import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.metamx.common.guava.Sequence;
import io.druid.query.groupby.GroupByQuery;
import io.druid.query.ingestmetadata.IngestMetadataQuery;
import io.druid.query.metadata.metadata.SegmentMetadataQuery;
import io.druid.query.search.search.SearchQuery;
import io.druid.query.select.SelectQuery;
@@ -44,7 +45,9 @@ import java.util.Map;
@JsonSubTypes.Type(name = Query.GROUP_BY, value = GroupByQuery.class),
@JsonSubTypes.Type(name = Query.SEGMENT_METADATA, value = SegmentMetadataQuery.class),
@JsonSubTypes.Type(name = Query.SELECT, value = SelectQuery.class),
@JsonSubTypes.Type(name = Query.TOPN, value = TopNQuery.class),
@JsonSubTypes.Type(name = Query.INGEST_METADATA, value = IngestMetadataQuery.class)
})
public interface Query<T>
{
@@ -55,6 +58,7 @@ public interface Query<T>
public static final String SEGMENT_METADATA = "segmentMetadata";
public static final String SELECT = "select";
public static final String TOPN = "topN";
public static final String INGEST_METADATA = "ingestMetadata";
public DataSource getDataSource();
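With INGEST_METADATA registered as a Jackson subtype above, the server can route the new queryType from JSON. Going by the @JsonProperty names on IngestMetadataQuery (below), a request body should look roughly like this sketch (datasource and interval are hypothetical; intervals and context are optional):

    {
      "queryType": "ingestMetadata",
      "dataSource": "example_datasource",
      "intervals": ["2014-01-01/2015-01-01"],
      "context": {"priority": 1}
    }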

View File: io.druid.query.ingestmetadata.IngestMetadataQuery

@@ -0,0 +1,151 @@
/*
* Druid - a distributed column store.
* Copyright (C) 2012, 2013 Metamarkets Group Inc.
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
package io.druid.query.ingestmetadata;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import io.druid.common.utils.JodaUtils;
import io.druid.query.BaseQuery;
import io.druid.query.DataSource;
import io.druid.query.Query;
import io.druid.query.Result;
import io.druid.query.spec.MultipleIntervalSegmentSpec;
import io.druid.query.spec.QuerySegmentSpec;
import org.joda.time.DateTime;
import org.joda.time.Interval;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
/**
*/
public class IngestMetadataQuery extends BaseQuery<Result<IngestMetadataResultValue>>
{
public static final Interval MY_Y2K_INTERVAL = new Interval(
new DateTime("0000-01-01"),
new DateTime("3000-01-01")
);
public static final String MAX_INGESTED_EVENT_TIME = "maxIngestedEventTime";
@JsonCreator
public IngestMetadataQuery(
@JsonProperty("dataSource") DataSource dataSource,
@JsonProperty("intervals") QuerySegmentSpec querySegmentSpec,
@JsonProperty("context") Map<String, Object> context
)
{
super(
dataSource,
(querySegmentSpec == null) ? new MultipleIntervalSegmentSpec(Arrays.asList(MY_Y2K_INTERVAL))
: querySegmentSpec,
context
);
}
@Override
public boolean hasFilters()
{
return false;
}
@Override
public String getType()
{
return Query.INGEST_METADATA;
}
@Override
public IngestMetadataQuery withOverriddenContext(Map<String, Object> contextOverrides)
{
return new IngestMetadataQuery(
getDataSource(),
getQuerySegmentSpec(),
computeOverridenContext(contextOverrides)
);
}
@Override
public IngestMetadataQuery withQuerySegmentSpec(QuerySegmentSpec spec)
{
return new IngestMetadataQuery(
getDataSource(),
spec,
getContext()
);
}
@Override
public Query<Result<IngestMetadataResultValue>> withDataSource(DataSource dataSource)
{
return new IngestMetadataQuery(
dataSource,
getQuerySegmentSpec(),
getContext()
);
}
public Iterable<Result<IngestMetadataResultValue>> buildResult(DateTime timestamp, DateTime maxIngestedEventTime)
{
List<Result<IngestMetadataResultValue>> results = Lists.newArrayList();
Map<String, Object> result = Maps.newHashMap();
if (maxIngestedEventTime != null) {
result.put(MAX_INGESTED_EVENT_TIME, maxIngestedEventTime);
}
if (!result.isEmpty()) {
results.add(new Result<>(timestamp, new IngestMetadataResultValue(result)));
}
return results;
}
public Iterable<Result<IngestMetadataResultValue>> mergeResults(List<Result<IngestMetadataResultValue>> results)
{
if (results == null || results.isEmpty()) {
return Lists.newArrayList();
}
DateTime max = new DateTime(JodaUtils.MIN_INSTANT);
for (Result<IngestMetadataResultValue> result : results) {
DateTime currMaxIngestedEventTime = result.getValue().getMaxIngestedEventTime();
if (currMaxIngestedEventTime != null && currMaxIngestedEventTime.isAfter(max)) {
max = currMaxIngestedEventTime;
}
}
return buildResult(max, max);
}
@Override
public String toString()
{
return "IngestMetadataQuery{" +
"dataSource='" + getDataSource() + '\'' +
", querySegmentSpec=" + getQuerySegmentSpec() +
", duration=" + getDuration() +
'}';
}
}
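Note the merge semantics above: buildResult emits nothing when maxIngestedEventTime is null, so empty segments drop out, and mergeResults collapses the remaining rows into a single row carrying the latest event time. A sketch with hypothetical timestamps (fragment; Lists is already imported here, Iterables is com.google.common.collect.Iterables):

    IngestMetadataQuery q = Druids.newIngestMetadataQueryBuilder().dataSource("example").build();
    List<Result<IngestMetadataResultValue>> perSegment = Lists.newArrayList(
        Iterables.concat(
            q.buildResult(new DateTime("2014-12-01"), new DateTime("2014-12-01T05:00")),
            q.buildResult(new DateTime("2014-12-01"), new DateTime("2014-12-01T09:00"))
        )
    );
    // Collapses to one row reporting 2014-12-01T09:00 as maxIngestedEventTime:
    Iterable<Result<IngestMetadataResultValue>> merged = q.mergeResults(perSegment);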

View File: io.druid.query.ingestmetadata.IngestMetadataQueryQueryToolChest

@@ -0,0 +1,154 @@
/*
* Druid - a distributed column store.
* Copyright (C) 2012, 2013 Metamarkets Group Inc.
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
package io.druid.query.ingestmetadata;
import com.fasterxml.jackson.core.type.TypeReference;
import com.google.common.base.Function;
import com.google.common.base.Functions;
import com.google.common.base.Predicate;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Ordering;
import com.metamx.common.guava.MergeSequence;
import com.metamx.common.guava.Sequence;
import com.metamx.common.guava.Sequences;
import com.metamx.emitter.service.ServiceMetricEvent;
import io.druid.collections.OrderedMergeSequence;
import io.druid.query.BySegmentSkippingQueryRunner;
import io.druid.query.CacheStrategy;
import io.druid.query.DataSourceUtil;
import io.druid.query.Query;
import io.druid.query.QueryRunner;
import io.druid.query.QueryToolChest;
import io.druid.query.Result;
import io.druid.query.aggregation.MetricManipulationFn;
import io.druid.timeline.LogicalSegment;
import java.util.List;
import java.util.Map;
/**
*/
public class IngestMetadataQueryQueryToolChest
extends QueryToolChest<Result<IngestMetadataResultValue>, IngestMetadataQuery>
{
private static final TypeReference<Result<IngestMetadataResultValue>> TYPE_REFERENCE = new TypeReference<Result<IngestMetadataResultValue>>()
{
};
@Override
public <T extends LogicalSegment> List<T> filterSegments(IngestMetadataQuery query, List<T> segments)
{
if (segments.size() <= 1) {
return segments;
}
final T min = segments.get(0);
final T max = segments.get(segments.size() - 1);
return Lists.newArrayList(
Iterables.filter(
segments,
new Predicate<T>()
{
@Override
public boolean apply(T input)
{
return (min != null && input.getInterval().overlaps(min.getInterval())) ||
(max != null && input.getInterval().overlaps(max.getInterval()));
}
}
)
);
}
@Override
public QueryRunner<Result<IngestMetadataResultValue>> mergeResults(
final QueryRunner<Result<IngestMetadataResultValue>> runner
)
{
return new BySegmentSkippingQueryRunner<Result<IngestMetadataResultValue>>(runner)
{
@Override
protected Sequence<Result<IngestMetadataResultValue>> doRun(
QueryRunner<Result<IngestMetadataResultValue>> baseRunner,
Query<Result<IngestMetadataResultValue>> input,
Map<String, Object> context
)
{
IngestMetadataQuery query = (IngestMetadataQuery) input;
return Sequences.simple(
query.mergeResults(
Sequences.toList(
baseRunner.run(query, context),
Lists.<Result<IngestMetadataResultValue>>newArrayList()
)
)
);
}
};
}
@Override
public Sequence<Result<IngestMetadataResultValue>> mergeSequences(Sequence<Sequence<Result<IngestMetadataResultValue>>> seqOfSequences)
{
return new OrderedMergeSequence<>(getOrdering(), seqOfSequences);
}
@Override
public Sequence<Result<IngestMetadataResultValue>> mergeSequencesUnordered(Sequence<Sequence<Result<IngestMetadataResultValue>>> seqOfSequences)
{
return new MergeSequence<>(getOrdering(), seqOfSequences);
}
@Override
public ServiceMetricEvent.Builder makeMetricBuilder(IngestMetadataQuery query)
{
return new ServiceMetricEvent.Builder()
.setUser2(DataSourceUtil.getMetricName(query.getDataSource()))
.setUser4(query.getType())
.setUser6("false");
}
@Override
public Function<Result<IngestMetadataResultValue>, Result<IngestMetadataResultValue>> makePreComputeManipulatorFn(
IngestMetadataQuery query, MetricManipulationFn fn
)
{
return Functions.identity();
}
@Override
public TypeReference<Result<IngestMetadataResultValue>> getResultTypeReference()
{
return TYPE_REFERENCE;
}
@Override
public CacheStrategy getCacheStrategy(IngestMetadataQuery query)
{
return null;
}
public Ordering<Result<IngestMetadataResultValue>> getOrdering()
{
return Ordering.natural();
}
}
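filterSegments above follows the TimeBoundary pattern: only segments overlapping the first or the last segment's interval are consulted, since only those can contain the latest ingested event. A behavioral sketch with hypothetical day-granularity segments, where seg(...) stands for a LogicalSegment over the given interval, built inline the way the new test does:

    IngestMetadataQueryQueryToolChest toolChest = new IngestMetadataQueryQueryToolChest();
    List<LogicalSegment> filtered = toolChest.filterSegments(
        query, // never dereferenced by this implementation
        Arrays.asList(seg("2013-01-01/P1D"), seg("2013-01-02/P1D"), seg("2013-01-03/P1D"))
    );
    // filtered keeps only the first and last segments; the middle day overlaps
    // neither endpoint interval and is dropped.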

View File: io.druid.query.ingestmetadata.IngestMetadataQueryRunnerFactory

@@ -0,0 +1,124 @@
/*
* Druid - a distributed column store.
* Copyright (C) 2012, 2013 Metamarkets Group Inc.
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
package io.druid.query.ingestmetadata;
import com.google.inject.Inject;
import com.metamx.common.ISE;
import com.metamx.common.guava.BaseSequence;
import com.metamx.common.guava.Sequence;
import io.druid.query.ChainedExecutionQueryRunner;
import io.druid.query.Query;
import io.druid.query.QueryRunner;
import io.druid.query.QueryRunnerFactory;
import io.druid.query.QueryToolChest;
import io.druid.query.QueryWatcher;
import io.druid.query.Result;
import io.druid.segment.Segment;
import io.druid.segment.StorageAdapter;
import java.util.Iterator;
import java.util.Map;
import java.util.concurrent.ExecutorService;
/**
*/
public class IngestMetadataQueryRunnerFactory
implements QueryRunnerFactory<Result<IngestMetadataResultValue>, IngestMetadataQuery>
{
private static final IngestMetadataQueryQueryToolChest toolChest = new IngestMetadataQueryQueryToolChest();
private final QueryWatcher queryWatcher;
@Inject
public IngestMetadataQueryRunnerFactory(QueryWatcher queryWatcher)
{
this.queryWatcher = queryWatcher;
}
@Override
public QueryRunner<Result<IngestMetadataResultValue>> createRunner(final Segment segment)
{
return new IngestMetadataQueryRunner(segment);
}
@Override
public QueryRunner<Result<IngestMetadataResultValue>> mergeRunners(
ExecutorService queryExecutor, Iterable<QueryRunner<Result<IngestMetadataResultValue>>> queryRunners
)
{
return new ChainedExecutionQueryRunner<>(
queryExecutor, toolChest.getOrdering(), queryWatcher, queryRunners
);
}
@Override
public QueryToolChest<Result<IngestMetadataResultValue>, IngestMetadataQuery> getToolchest()
{
return toolChest;
}
private static class IngestMetadataQueryRunner implements QueryRunner<Result<IngestMetadataResultValue>>
{
private final StorageAdapter adapter;
public IngestMetadataQueryRunner(Segment segment)
{
this.adapter = segment.asStorageAdapter();
}
@Override
public Sequence<Result<IngestMetadataResultValue>> run(
Query<Result<IngestMetadataResultValue>> input,
Map<String, Object> responseContext
)
{
if (!(input instanceof IngestMetadataQuery)) {
throw new ISE("Got a [%s] which isn't a %s", input.getClass(), IngestMetadataQuery.class);
}
final IngestMetadataQuery legacyQuery = (IngestMetadataQuery) input;
return new BaseSequence<>(
new BaseSequence.IteratorMaker<Result<IngestMetadataResultValue>, Iterator<Result<IngestMetadataResultValue>>>()
{
@Override
public Iterator<Result<IngestMetadataResultValue>> make()
{
if (adapter == null) {
throw new ISE(
"Null storage adapter found. Probably trying to issue a query against a segment being memory unmapped."
);
}
return legacyQuery.buildResult(
adapter.getInterval().getStart(),
adapter.getMaxIngestedEventTime()
).iterator();
}
@Override
public void cleanup(Iterator<Result<IngestMetadataResultValue>> toClean)
{
}
}
);
}
}
}
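Per segment, the runner above emits at most one row: the Result timestamp is the segment interval's start, and the value holds the adapter's watermark. When getMaxIngestedEventTime() returns null, buildResult yields nothing, so such a segment contributes no row at all. Sketch of one row's shape, with hypothetical times:

    // Segment starting 2014-12-01, last ingested event at 2014-12-01T09:00 ->
    // Result{timestamp=2014-12-01T00:00:00.000Z,
    //        value={maxIngestedEventTime=2014-12-01T09:00:00.000Z}}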

View File: io.druid.query.ingestmetadata.IngestMetadataResultValue

@@ -0,0 +1,105 @@
/*
* Druid - a distributed column store.
* Copyright (C) 2012, 2013 Metamarkets Group Inc.
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
package io.druid.query.ingestmetadata;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonValue;
import com.metamx.common.IAE;
import org.joda.time.DateTime;
import java.util.Map;
/**
*/
public class IngestMetadataResultValue
{
private final Object value;
@JsonCreator
public IngestMetadataResultValue(
Object value
)
{
this.value = value;
}
@JsonValue
public Object getBaseObject()
{
return value;
}
public DateTime getMaxIngestedEventTime()
{
if (value instanceof Map) {
return getDateTimeValue(((Map) value).get(IngestMetadataQuery.MAX_INGESTED_EVENT_TIME));
} else {
return getDateTimeValue(value);
}
}
@Override
public boolean equals(Object o)
{
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
IngestMetadataResultValue that = (IngestMetadataResultValue) o;
if (value != null ? !value.equals(that.value) : that.value != null) {
return false;
}
return true;
}
@Override
public int hashCode()
{
return value != null ? value.hashCode() : 0;
}
@Override
public String toString()
{
return "IngestMetadataResultValue{" +
"value=" + value +
'}';
}
private DateTime getDateTimeValue(Object val)
{
if (val == null) {
return null;
}
if (val instanceof DateTime) {
return (DateTime) val;
} else if (val instanceof String) {
return new DateTime(val);
} else {
throw new IAE("Cannot get time from type[%s]", val.getClass());
}
}
}
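getMaxIngestedEventTime tolerates both shapes the value can take on the wire: the full result map keyed by MAX_INGESTED_EVENT_TIME, or a bare DateTime/String, with strings parsed by Joda. A small sketch (hypothetical timestamp; ImmutableMap is Guava's):

    IngestMetadataResultValue fromMap = new IngestMetadataResultValue(
        ImmutableMap.of(IngestMetadataQuery.MAX_INGESTED_EVENT_TIME, "2014-12-10T19:44:00Z")
    );
    DateTime t = fromMap.getMaxIngestedEventTime(); // parsed from the string form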

View File: io.druid.segment.QueryableIndexStorageAdapter

@@ -133,6 +133,13 @@ public class QueryableIndexStorageAdapter implements StorageAdapter
return Capabilities.builder().dimensionValuesSorted(true).build();
}
@Override
public DateTime getMaxIngestedEventTime()
{
// For immutable indexes, maxIngestedEventTime is maxTime.
return getMaxTime();
}
@Override
public Sequence<Cursor> makeCursors(Filter filter, Interval interval, QueryGranularity gran)
{

View File: io.druid.segment.StorageAdapter

@@ -35,4 +35,5 @@ public interface StorageAdapter extends CursorFactory
public DateTime getMinTime();
public DateTime getMaxTime();
public Capabilities getCapabilities();
public DateTime getMaxIngestedEventTime();
}
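This widens the StorageAdapter contract, so any out-of-tree adapter now has to implement the new method as well. For immutable storage it can simply delegate to getMaxTime(), matching QueryableIndexStorageAdapter above; a sketch for a hypothetical custom adapter:

    @Override
    public DateTime getMaxIngestedEventTime()
    {
      // Immutable storage: nothing arrives after the segment is built,
      // so the latest ingested event time is just the segment's max time.
      return getMaxTime();
    }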

View File: io.druid.segment.incremental.IncrementalIndex

@@ -68,6 +68,8 @@ import java.util.concurrent.atomic.AtomicInteger;
*/
public abstract class IncrementalIndex<AggregatorType> implements Iterable<Row>, Closeable
{
private volatile DateTime maxIngestedEventTime;
protected static ColumnSelectorFactory makeColumnSelectorFactory(
final AggregatorFactory agg,
final ThreadLocal<InputRow> in,
@@ -428,7 +430,16 @@ public abstract class IncrementalIndex<AggregatorType> implements Iterable<Row>,
}
final TimeAndDims key = new TimeAndDims(Math.max(gran.truncate(row.getTimestampFromEpoch()), minTimestamp), dims);
final Integer rv = addToFacts(metrics, deserializeComplexMetrics, row, numEntries, key, in);
updateMaxIngestedTime(row.getTimestamp());
return rv;
}
public synchronized void updateMaxIngestedTime(DateTime eventTime)
{
if (maxIngestedEventTime == null || maxIngestedEventTime.isBefore(eventTime)) {
maxIngestedEventTime = eventTime;
}
}
public boolean isEmpty()
@@ -591,6 +602,11 @@ public abstract class IncrementalIndex<AggregatorType> implements Iterable<Row>,
};
}
public DateTime getMaxIngestedEventTime()
{
return maxIngestedEventTime;
}
class DimensionHolder
{
private final Map<String, DimDim> dimensions;
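The watermark is maintained at ingest time: maxIngestedEventTime is volatile so queries read the latest value without locking, and updateMaxIngestedTime is synchronized so concurrent add() calls cannot regress the maximum. Behavioral sketch, where rowAt(...) is a hypothetical helper building a MapBasedInputRow at the given time:

    index.add(rowAt("2014-12-10T10:00"));
    index.add(rowAt("2014-12-10T09:00")); // late, out-of-order event
    index.getMaxIngestedEventTime();      // still 2014-12-10T10:00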

View File: io.druid.segment.incremental.IncrementalIndexStorageAdapter

@@ -122,6 +122,12 @@ public class IncrementalIndexStorageAdapter implements StorageAdapter
return Capabilities.builder().dimensionValuesSorted(false).build();
}
@Override
public DateTime getMaxIngestedEventTime()
{
return index.getMaxIngestedEventTime();
}
@Override
public Sequence<Cursor> makeCursors(final Filter filter, final Interval interval, final QueryGranularity gran)
{

View File: io.druid.query.ingestmetadata.IngestMetadataQueryTest

@@ -0,0 +1,206 @@
/*
* Druid - a distributed column store.
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*
* This file Copyright (C) 2014 N3TWORK, Inc. and contributed to the Druid project
* under the Druid Corporate Contributor License Agreement.
*/
package io.druid.query.ingestmetadata;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import com.google.common.collect.MapMaker;
import com.metamx.common.guava.Sequences;
import io.druid.data.input.MapBasedInputRow;
import io.druid.jackson.DefaultObjectMapper;
import io.druid.query.Druids;
import io.druid.query.Query;
import io.druid.query.QueryRunner;
import io.druid.query.QueryRunnerFactory;
import io.druid.query.QueryRunnerTestHelper;
import io.druid.query.Result;
import io.druid.segment.IncrementalIndexSegment;
import io.druid.segment.TestIndex;
import io.druid.segment.incremental.IncrementalIndex;
import io.druid.timeline.LogicalSegment;
import org.joda.time.DateTime;
import org.joda.time.Interval;
import org.junit.Assert;
import org.junit.Test;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
public class IngestMetadataQueryTest
{
private static final ObjectMapper jsonMapper = new DefaultObjectMapper();
@Test
public void testQuerySerialization() throws IOException
{
Query query = Druids.newIngestMetadataQueryBuilder()
.dataSource("testing")
.build();
String json = jsonMapper.writeValueAsString(query);
Query serdeQuery = jsonMapper.readValue(json, Query.class);
Assert.assertEquals(query, serdeQuery);
}
@Test
public void testContextSerde() throws Exception
{
final IngestMetadataQuery query = Druids.newIngestMetadataQueryBuilder()
.dataSource("foo")
.intervals("2013/2014")
.context(
ImmutableMap.<String, Object>of(
"priority",
1,
"useCache",
true,
"populateCache",
true,
"finalize",
true
)
).build();
final ObjectMapper mapper = new DefaultObjectMapper();
final Query serdeQuery = mapper.readValue(
mapper.writeValueAsBytes(
mapper.readValue(
mapper.writeValueAsString(
query
), Query.class
)
), Query.class
);
Assert.assertEquals(1, serdeQuery.getContextValue("priority"));
Assert.assertEquals(true, serdeQuery.getContextValue("useCache"));
Assert.assertEquals(true, serdeQuery.getContextValue("populateCache"));
Assert.assertEquals(true, serdeQuery.getContextValue("finalize"));
}
@Test
public void testMaxIngestedEventTime() throws Exception
{
final IncrementalIndex rtIndex = TestIndex.getIncrementalTestIndex(false);
final QueryRunner runner = QueryRunnerTestHelper.makeQueryRunner(
(QueryRunnerFactory) new IngestMetadataQueryRunnerFactory(
QueryRunnerTestHelper.NOOP_QUERYWATCHER
), new IncrementalIndexSegment(rtIndex, "test")
);
DateTime timestamp = new DateTime(System.currentTimeMillis());
rtIndex.add(
new MapBasedInputRow(
timestamp.getMillis(),
ImmutableList.of("dim1"),
ImmutableMap.<String, Object>of("dim1", "x")
)
);
IngestMetadataQuery ingestMetadataQuery = Druids.newIngestMetadataQueryBuilder()
.dataSource("testing")
.build();
Map<String, Object> context = new MapMaker().makeMap();
context.put(Result.MISSING_SEGMENTS_KEY, Lists.newArrayList());
Iterable<Result<IngestMetadataResultValue>> results = Sequences.toList(
runner.run(ingestMetadataQuery, context),
Lists.<Result<IngestMetadataResultValue>>newArrayList()
);
IngestMetadataResultValue val = results.iterator().next().getValue();
DateTime maxIngestedEventTime = val.getMaxIngestedEventTime();
Assert.assertEquals(timestamp, maxIngestedEventTime);
}
@Test
public void testFilterSegments()
{
List<LogicalSegment> segments = new IngestMetadataQueryQueryToolChest().filterSegments(
null,
Arrays.asList(
new LogicalSegment()
{
@Override
public Interval getInterval()
{
return new Interval("2013-01-01/P1D");
}
},
new LogicalSegment()
{
@Override
public Interval getInterval()
{
return new Interval("2013-01-01T01/PT1H");
}
},
new LogicalSegment()
{
@Override
public Interval getInterval()
{
return new Interval("2013-01-01T02/PT1H");
}
}
)
);
Assert.assertEquals(segments.size(), 3);
List<LogicalSegment> expected = Arrays.asList(
new LogicalSegment()
{
@Override
public Interval getInterval()
{
return new Interval("2013-01-01/P1D");
}
},
new LogicalSegment()
{
@Override
public Interval getInterval()
{
return new Interval("2013-01-01T01/PT1H");
}
},
new LogicalSegment()
{
@Override
public Interval getInterval()
{
return new Interval("2013-01-01T02/PT1H");
}
}
);
for (int i = 0; i < segments.size(); i++) {
Assert.assertEquals(segments.get(i).getInterval(), expected.get(i).getInterval());
}
}
}

View File: io.druid.guice.QueryRunnerFactoryModule

@@ -28,6 +28,8 @@ import io.druid.query.QueryWatcher;
import io.druid.query.groupby.GroupByQuery;
import io.druid.query.groupby.GroupByQueryEngine;
import io.druid.query.groupby.GroupByQueryRunnerFactory;
import io.druid.query.ingestmetadata.IngestMetadataQuery;
import io.druid.query.ingestmetadata.IngestMetadataQueryRunnerFactory;
import io.druid.query.metadata.SegmentMetadataQueryRunnerFactory;
import io.druid.query.metadata.metadata.SegmentMetadataQuery;
import io.druid.query.search.SearchQueryRunnerFactory;
@@ -57,6 +59,7 @@ public class QueryRunnerFactoryModule extends QueryToolChestModule
.put(GroupByQuery.class, GroupByQueryRunnerFactory.class)
.put(SelectQuery.class, SelectQueryRunnerFactory.class)
.put(TopNQuery.class, TopNQueryRunnerFactory.class)
.put(IngestMetadataQuery.class, IngestMetadataQueryRunnerFactory.class)
.build();
@Override

View File: io.druid.guice.QueryToolChestModule

@@ -29,6 +29,8 @@ import io.druid.query.QueryToolChest;
import io.druid.query.groupby.GroupByQuery;
import io.druid.query.groupby.GroupByQueryConfig;
import io.druid.query.groupby.GroupByQueryQueryToolChest;
import io.druid.query.ingestmetadata.IngestMetadataQuery;
import io.druid.query.ingestmetadata.IngestMetadataQueryQueryToolChest;
import io.druid.query.metadata.SegmentMetadataQueryQueryToolChest;
import io.druid.query.metadata.metadata.SegmentMetadataQuery;
import io.druid.query.search.SearchQueryQueryToolChest;
@@ -59,6 +61,7 @@ public class QueryToolChestModule implements Module
.put(GroupByQuery.class, GroupByQueryQueryToolChest.class)
.put(SelectQuery.class, SelectQueryQueryToolChest.class)
.put(TopNQuery.class, TopNQueryQueryToolChest.class)
.put(IngestMetadataQuery.class, IngestMetadataQueryQueryToolChest.class)
.build();
@Override