review comments refactoring

This commit is contained in:
nishantmonu51 2014-12-11 16:33:14 +05:30
parent 3763357f6e
commit 32b4f55b8a
9 changed files with 100 additions and 100 deletions

View File

@@ -29,7 +29,7 @@ import io.druid.query.filter.NoopDimFilter;
import io.druid.query.filter.NotDimFilter;
import io.druid.query.filter.OrDimFilter;
import io.druid.query.filter.SelectorDimFilter;
import io.druid.query.ingestmetadata.IngestMetadataQuery;
import io.druid.query.datasourcemetadata.DataSourceMetadataQuery;
import io.druid.query.metadata.metadata.ColumnIncluderator;
import io.druid.query.metadata.metadata.SegmentMetadataQuery;
import io.druid.query.search.SearchResultValue;
@@ -1102,88 +1102,88 @@ public class Druids
}
/**
* A Builder for IngestMetadataQuery.
* A Builder for DataSourceMetadataQuery.
*
* Required: dataSource() must be called before build()
*
* Usage example:
* <pre><code>
* IngestMetadataQueryBuilder query = new IngestMetadataQueryBuilder()
* DataSourceMetadataQueryBuilder query = new DataSourceMetadataQueryBuilder()
* .dataSource("Example")
* .build();
* </code></pre>
*
* @see io.druid.query.ingestmetadata.IngestMetadataQuery
* @see io.druid.query.datasourcemetadata.DataSourceMetadataQuery
*/
public static class IngestMetadataQueryBuilder
public static class DataSourceMetadataQueryBuilder
{
private DataSource dataSource;
private QuerySegmentSpec querySegmentSpec;
private Map<String, Object> context;
public IngestMetadataQueryBuilder()
public DataSourceMetadataQueryBuilder()
{
dataSource = null;
querySegmentSpec = null;
context = null;
}
public IngestMetadataQuery build()
public DataSourceMetadataQuery build()
{
return new IngestMetadataQuery(
return new DataSourceMetadataQuery(
dataSource,
querySegmentSpec,
context
);
}
public IngestMetadataQueryBuilder copy(IngestMetadataQueryBuilder builder)
public DataSourceMetadataQueryBuilder copy(DataSourceMetadataQueryBuilder builder)
{
return new IngestMetadataQueryBuilder()
return new DataSourceMetadataQueryBuilder()
.dataSource(builder.dataSource)
.intervals(builder.querySegmentSpec)
.context(builder.context);
}
public IngestMetadataQueryBuilder dataSource(String ds)
public DataSourceMetadataQueryBuilder dataSource(String ds)
{
dataSource = new TableDataSource(ds);
return this;
}
public IngestMetadataQueryBuilder dataSource(DataSource ds)
public DataSourceMetadataQueryBuilder dataSource(DataSource ds)
{
dataSource = ds;
return this;
}
public IngestMetadataQueryBuilder intervals(QuerySegmentSpec q)
public DataSourceMetadataQueryBuilder intervals(QuerySegmentSpec q)
{
querySegmentSpec = q;
return this;
}
public IngestMetadataQueryBuilder intervals(String s)
public DataSourceMetadataQueryBuilder intervals(String s)
{
querySegmentSpec = new LegacySegmentSpec(s);
return this;
}
public IngestMetadataQueryBuilder intervals(List<Interval> l)
public DataSourceMetadataQueryBuilder intervals(List<Interval> l)
{
querySegmentSpec = new LegacySegmentSpec(l);
return this;
}
public IngestMetadataQueryBuilder context(Map<String, Object> c)
public DataSourceMetadataQueryBuilder context(Map<String, Object> c)
{
context = c;
return this;
}
}
public static IngestMetadataQueryBuilder newIngestMetadataQueryBuilder()
public static DataSourceMetadataQueryBuilder newDataSourceMetadataQueryBuilder()
{
return new IngestMetadataQueryBuilder();
return new DataSourceMetadataQueryBuilder();
}
}

View File

@@ -23,7 +23,7 @@ import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.metamx.common.guava.Sequence;
import io.druid.query.groupby.GroupByQuery;
import io.druid.query.ingestmetadata.IngestMetadataQuery;
import io.druid.query.datasourcemetadata.DataSourceMetadataQuery;
import io.druid.query.metadata.metadata.SegmentMetadataQuery;
import io.druid.query.search.search.SearchQuery;
import io.druid.query.select.SelectQuery;
@@ -46,7 +46,7 @@ import java.util.Map;
@JsonSubTypes.Type(name = Query.SEGMENT_METADATA, value = SegmentMetadataQuery.class),
@JsonSubTypes.Type(name = Query.SELECT, value = SelectQuery.class),
@JsonSubTypes.Type(name = Query.TOPN, value = TopNQuery.class),
@JsonSubTypes.Type(name = Query.INGEST_METADATA, value = IngestMetadataQuery.class)
@JsonSubTypes.Type(name = Query.DATASOURCE_METADATA, value = DataSourceMetadataQuery.class)
})
public interface Query<T>
@@ -58,7 +58,7 @@ public interface Query<T>
public static final String SEGMENT_METADATA = "segmentMetadata";
public static final String SELECT = "select";
public static final String TOPN = "topN";
public static final String INGEST_METADATA = "ingestMetadata";
public static final String DATASOURCE_METADATA = "dataSourceMetadata";
public DataSource getDataSource();

View File

@@ -17,7 +17,7 @@
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
package io.druid.query.ingestmetadata;
package io.druid.query.datasourcemetadata;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
@@ -39,7 +39,7 @@ import java.util.Map;
/**
*/
public class IngestMetadataQuery extends BaseQuery<Result<IngestMetadataResultValue>>
public class DataSourceMetadataQuery extends BaseQuery<Result<DataSourceMetadataResultValue>>
{
public static final Interval MY_Y2K_INTERVAL = new Interval(
new DateTime("0000-01-01"),
@@ -50,7 +50,7 @@ public class IngestMetadataQuery extends BaseQuery<Result<IngestMetadataResultVa
@JsonCreator
public IngestMetadataQuery(
public DataSourceMetadataQuery(
@JsonProperty("dataSource") DataSource dataSource,
@JsonProperty("intervals") QuerySegmentSpec querySegmentSpec,
@JsonProperty("context") Map<String, Object> context
@@ -73,13 +73,13 @@ public class IngestMetadataQuery extends BaseQuery<Result<IngestMetadataResultVa
@Override
public String getType()
{
return Query.INGEST_METADATA;
return Query.DATASOURCE_METADATA;
}
@Override
public IngestMetadataQuery withOverriddenContext(Map<String, Object> contextOverrides)
public DataSourceMetadataQuery withOverriddenContext(Map<String, Object> contextOverrides)
{
return new IngestMetadataQuery(
return new DataSourceMetadataQuery(
getDataSource(),
getQuerySegmentSpec(),
computeOverridenContext(contextOverrides)
@@ -87,9 +87,9 @@ public class IngestMetadataQuery extends BaseQuery<Result<IngestMetadataResultVa
}
@Override
public IngestMetadataQuery withQuerySegmentSpec(QuerySegmentSpec spec)
public DataSourceMetadataQuery withQuerySegmentSpec(QuerySegmentSpec spec)
{
return new IngestMetadataQuery(
return new DataSourceMetadataQuery(
getDataSource(),
spec,
getContext()
@@ -97,38 +97,38 @@ public class IngestMetadataQuery extends BaseQuery<Result<IngestMetadataResultVa
}
@Override
public Query<Result<IngestMetadataResultValue>> withDataSource(DataSource dataSource)
public Query<Result<DataSourceMetadataResultValue>> withDataSource(DataSource dataSource)
{
return new IngestMetadataQuery(
return new DataSourceMetadataQuery(
dataSource,
getQuerySegmentSpec(),
getContext()
);
}
public Iterable<Result<IngestMetadataResultValue>> buildResult(DateTime timestamp, DateTime maxIngestedEventTime)
public Iterable<Result<DataSourceMetadataResultValue>> buildResult(DateTime timestamp, DateTime maxIngestedEventTime)
{
List<Result<IngestMetadataResultValue>> results = Lists.newArrayList();
List<Result<DataSourceMetadataResultValue>> results = Lists.newArrayList();
Map<String, Object> result = Maps.newHashMap();
if (maxIngestedEventTime != null) {
result.put(MAX_INGESTED_EVENT_TIME, maxIngestedEventTime);
}
if (!result.isEmpty()) {
results.add(new Result<>(timestamp, new IngestMetadataResultValue(result)));
results.add(new Result<>(timestamp, new DataSourceMetadataResultValue(result)));
}
return results;
}
public Iterable<Result<IngestMetadataResultValue>> mergeResults(List<Result<IngestMetadataResultValue>> results)
public Iterable<Result<DataSourceMetadataResultValue>> mergeResults(List<Result<DataSourceMetadataResultValue>> results)
{
if (results == null || results.isEmpty()) {
return Lists.newArrayList();
}
DateTime max = new DateTime(JodaUtils.MIN_INSTANT);
for (Result<IngestMetadataResultValue> result : results) {
for (Result<DataSourceMetadataResultValue> result : results) {
DateTime currMaxIngestedEventTime = result.getValue().getMaxIngestedEventTime();
if (currMaxIngestedEventTime != null && currMaxIngestedEventTime.isAfter(max)) {
max = currMaxIngestedEventTime;
@@ -141,7 +141,7 @@ public class IngestMetadataQuery extends BaseQuery<Result<IngestMetadataResultVa
@Override
public String toString()
{
return "IngestMetadataQuery{" +
return "DataSourceMetadataQuery{" +
"dataSource='" + getDataSource() + '\'' +
", querySegmentSpec=" + getQuerySegmentSpec() +
", duration=" + getDuration() +

View File

@@ -17,7 +17,7 @@
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
package io.druid.query.ingestmetadata;
package io.druid.query.datasourcemetadata;
import com.google.inject.Inject;
import com.metamx.common.ISE;
@@ -39,27 +39,27 @@ import java.util.concurrent.ExecutorService;
/**
*/
public class IngestMetadataQueryRunnerFactory
implements QueryRunnerFactory<Result<IngestMetadataResultValue>, IngestMetadataQuery>
public class DataSourceMetadataQueryRunnerFactory
implements QueryRunnerFactory<Result<DataSourceMetadataResultValue>, DataSourceMetadataQuery>
{
private static final IngestMetadataQueryQueryToolChest toolChest = new IngestMetadataQueryQueryToolChest();
private static final DataSourceQueryQueryToolChest toolChest = new DataSourceQueryQueryToolChest();
private final QueryWatcher queryWatcher;
@Inject
public IngestMetadataQueryRunnerFactory(QueryWatcher queryWatcher)
public DataSourceMetadataQueryRunnerFactory(QueryWatcher queryWatcher)
{
this.queryWatcher = queryWatcher;
}
@Override
public QueryRunner<Result<IngestMetadataResultValue>> createRunner(final Segment segment)
public QueryRunner<Result<DataSourceMetadataResultValue>> createRunner(final Segment segment)
{
return new IngestMetadataQueryRunner(segment);
return new DataSourceMetadataQueryRunner(segment);
}
@Override
public QueryRunner<Result<IngestMetadataResultValue>> mergeRunners(
ExecutorService queryExecutor, Iterable<QueryRunner<Result<IngestMetadataResultValue>>> queryRunners
public QueryRunner<Result<DataSourceMetadataResultValue>> mergeRunners(
ExecutorService queryExecutor, Iterable<QueryRunner<Result<DataSourceMetadataResultValue>>> queryRunners
)
{
return new ChainedExecutionQueryRunner<>(
@@ -68,37 +68,37 @@ public class IngestMetadataQueryRunnerFactory
}
@Override
public QueryToolChest<Result<IngestMetadataResultValue>, IngestMetadataQuery> getToolchest()
public QueryToolChest<Result<DataSourceMetadataResultValue>, DataSourceMetadataQuery> getToolchest()
{
return toolChest;
}
private static class IngestMetadataQueryRunner implements QueryRunner<Result<IngestMetadataResultValue>>
private static class DataSourceMetadataQueryRunner implements QueryRunner<Result<DataSourceMetadataResultValue>>
{
private final StorageAdapter adapter;
public IngestMetadataQueryRunner(Segment segment)
public DataSourceMetadataQueryRunner(Segment segment)
{
this.adapter = segment.asStorageAdapter();
}
@Override
public Sequence<Result<IngestMetadataResultValue>> run(
Query<Result<IngestMetadataResultValue>> input,
public Sequence<Result<DataSourceMetadataResultValue>> run(
Query<Result<DataSourceMetadataResultValue>> input,
Map<String, Object> responseContext
)
{
if (!(input instanceof IngestMetadataQuery)) {
throw new ISE("Got a [%s] which isn't a %s", input.getClass(), IngestMetadataQuery.class);
if (!(input instanceof DataSourceMetadataQuery)) {
throw new ISE("Got a [%s] which isn't a %s", input.getClass().getCanonicalName(), DataSourceMetadataQuery.class);
}
final IngestMetadataQuery legacyQuery = (IngestMetadataQuery) input;
final DataSourceMetadataQuery legacyQuery = (DataSourceMetadataQuery) input;
return new BaseSequence<>(
new BaseSequence.IteratorMaker<Result<IngestMetadataResultValue>, Iterator<Result<IngestMetadataResultValue>>>()
new BaseSequence.IteratorMaker<Result<DataSourceMetadataResultValue>, Iterator<Result<DataSourceMetadataResultValue>>>()
{
@Override
public Iterator<Result<IngestMetadataResultValue>> make()
public Iterator<Result<DataSourceMetadataResultValue>> make()
{
if (adapter == null) {
throw new ISE(
@@ -113,7 +113,7 @@ public class IngestMetadataQueryRunnerFactory
}
@Override
public void cleanup(Iterator<Result<IngestMetadataResultValue>> toClean)
public void cleanup(Iterator<Result<DataSourceMetadataResultValue>> toClean)
{
}

View File

@@ -17,7 +17,7 @@
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
package io.druid.query.ingestmetadata;
package io.druid.query.datasourcemetadata;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonValue;
@@ -28,12 +28,12 @@ import java.util.Map;
/**
*/
public class IngestMetadataResultValue
public class DataSourceMetadataResultValue
{
private final Object value;
@JsonCreator
public IngestMetadataResultValue(
public DataSourceMetadataResultValue(
Object value
)
{
@@ -49,7 +49,7 @@ public class IngestMetadataResultValue
public DateTime getMaxIngestedEventTime()
{
if (value instanceof Map) {
return getDateTimeValue(((Map) value).get(IngestMetadataQuery.MAX_INGESTED_EVENT_TIME));
return getDateTimeValue(((Map) value).get(DataSourceMetadataQuery.MAX_INGESTED_EVENT_TIME));
} else {
return getDateTimeValue(value);
}
@@ -65,7 +65,7 @@ public class IngestMetadataResultValue
return false;
}
IngestMetadataResultValue that = (IngestMetadataResultValue) o;
DataSourceMetadataResultValue that = (DataSourceMetadataResultValue) o;
if (value != null ? !value.equals(that.value) : that.value != null) {
return false;
@@ -83,7 +83,7 @@ public class IngestMetadataResultValue
@Override
public String toString()
{
return "IngestMetadataResultValue{" +
return "DataSourceMetadataResultValue{" +
"value=" + value +
'}';
}

View File

@@ -17,7 +17,7 @@
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
package io.druid.query.ingestmetadata;
package io.druid.query.datasourcemetadata;
import com.fasterxml.jackson.core.type.TypeReference;
import com.google.common.base.Function;
@@ -46,15 +46,15 @@ import java.util.Map;
/**
*/
public class IngestMetadataQueryQueryToolChest
extends QueryToolChest<Result<IngestMetadataResultValue>, IngestMetadataQuery>
public class DataSourceQueryQueryToolChest
extends QueryToolChest<Result<DataSourceMetadataResultValue>, DataSourceMetadataQuery>
{
private static final TypeReference<Result<IngestMetadataResultValue>> TYPE_REFERENCE = new TypeReference<Result<IngestMetadataResultValue>>()
private static final TypeReference<Result<DataSourceMetadataResultValue>> TYPE_REFERENCE = new TypeReference<Result<DataSourceMetadataResultValue>>()
{
};
@Override
public <T extends LogicalSegment> List<T> filterSegments(IngestMetadataQuery query, List<T> segments)
public <T extends LogicalSegment> List<T> filterSegments(DataSourceMetadataQuery query, List<T> segments)
{
if (segments.size() <= 1) {
return segments;
@@ -80,25 +80,25 @@ public class IngestMetadataQueryQueryToolChest
}
@Override
public QueryRunner<Result<IngestMetadataResultValue>> mergeResults(
final QueryRunner<Result<IngestMetadataResultValue>> runner
public QueryRunner<Result<DataSourceMetadataResultValue>> mergeResults(
final QueryRunner<Result<DataSourceMetadataResultValue>> runner
)
{
return new BySegmentSkippingQueryRunner<Result<IngestMetadataResultValue>>(runner)
return new BySegmentSkippingQueryRunner<Result<DataSourceMetadataResultValue>>(runner)
{
@Override
protected Sequence<Result<IngestMetadataResultValue>> doRun(
QueryRunner<Result<IngestMetadataResultValue>> baseRunner,
Query<Result<IngestMetadataResultValue>> input,
protected Sequence<Result<DataSourceMetadataResultValue>> doRun(
QueryRunner<Result<DataSourceMetadataResultValue>> baseRunner,
Query<Result<DataSourceMetadataResultValue>> input,
Map<String, Object> context
)
{
IngestMetadataQuery query = (IngestMetadataQuery) input;
DataSourceMetadataQuery query = (DataSourceMetadataQuery) input;
return Sequences.simple(
query.mergeResults(
Sequences.toList(
baseRunner.run(query, context),
Lists.<Result<IngestMetadataResultValue>>newArrayList()
Lists.<Result<DataSourceMetadataResultValue>>newArrayList()
)
)
);
@@ -107,19 +107,19 @@ public class IngestMetadataQueryQueryToolChest
}
@Override
public Sequence<Result<IngestMetadataResultValue>> mergeSequences(Sequence<Sequence<Result<IngestMetadataResultValue>>> seqOfSequences)
public Sequence<Result<DataSourceMetadataResultValue>> mergeSequences(Sequence<Sequence<Result<DataSourceMetadataResultValue>>> seqOfSequences)
{
return new OrderedMergeSequence<>(getOrdering(), seqOfSequences);
}
@Override
public Sequence<Result<IngestMetadataResultValue>> mergeSequencesUnordered(Sequence<Sequence<Result<IngestMetadataResultValue>>> seqOfSequences)
public Sequence<Result<DataSourceMetadataResultValue>> mergeSequencesUnordered(Sequence<Sequence<Result<DataSourceMetadataResultValue>>> seqOfSequences)
{
return new MergeSequence<>(getOrdering(), seqOfSequences);
}
@Override
public ServiceMetricEvent.Builder makeMetricBuilder(IngestMetadataQuery query)
public ServiceMetricEvent.Builder makeMetricBuilder(DataSourceMetadataQuery query)
{
return new ServiceMetricEvent.Builder()
.setUser2(DataSourceUtil.getMetricName(query.getDataSource()))
@@ -128,26 +128,26 @@ public class IngestMetadataQueryQueryToolChest
}
@Override
public Function<Result<IngestMetadataResultValue>, Result<IngestMetadataResultValue>> makePreComputeManipulatorFn(
IngestMetadataQuery query, MetricManipulationFn fn
public Function<Result<DataSourceMetadataResultValue>, Result<DataSourceMetadataResultValue>> makePreComputeManipulatorFn(
DataSourceMetadataQuery query, MetricManipulationFn fn
)
{
return Functions.identity();
}
@Override
public TypeReference<Result<IngestMetadataResultValue>> getResultTypeReference()
public TypeReference<Result<DataSourceMetadataResultValue>> getResultTypeReference()
{
return TYPE_REFERENCE;
}
@Override
public CacheStrategy getCacheStrategy(IngestMetadataQuery query)
public CacheStrategy getCacheStrategy(DataSourceMetadataQuery query)
{
return null;
}
public Ordering<Result<IngestMetadataResultValue>> getOrdering()
public Ordering<Result<DataSourceMetadataResultValue>> getOrdering()
{
return Ordering.natural();
}

View File

@@ -19,7 +19,7 @@
* under the Druid Corporate Contributor License Agreement.
*/
package io.druid.query.ingestmetadata;
package io.druid.query.datasourcemetadata;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.ImmutableList;
@@ -50,14 +50,14 @@ import java.util.Arrays;
import java.util.List;
import java.util.Map;
public class IngestMetadataQueryTest
public class DataSourceMetadataQueryTest
{
private static final ObjectMapper jsonMapper = new DefaultObjectMapper();
@Test
public void testQuerySerialization() throws IOException
{
Query query = Druids.newIngestMetadataQueryBuilder()
Query query = Druids.newDataSourceMetadataQueryBuilder()
.dataSource("testing")
.build();
@@ -70,7 +70,7 @@ public class IngestMetadataQueryTest
@Test
public void testContextSerde() throws Exception
{
final IngestMetadataQuery query = Druids.newIngestMetadataQueryBuilder()
final DataSourceMetadataQuery query = Druids.newDataSourceMetadataQueryBuilder()
.dataSource("foo")
.intervals("2013/2014")
.context(
@@ -109,7 +109,7 @@ public class IngestMetadataQueryTest
{
final IncrementalIndex rtIndex = TestIndex.getIncrementalTestIndex(false);
final QueryRunner runner = QueryRunnerTestHelper.makeQueryRunner(
(QueryRunnerFactory) new IngestMetadataQueryRunnerFactory(
(QueryRunnerFactory) new DataSourceMetadataQueryRunnerFactory(
QueryRunnerTestHelper.NOOP_QUERYWATCHER
), new IncrementalIndexSegment(rtIndex, "test")
);
@@ -121,16 +121,16 @@ public class IngestMetadataQueryTest
ImmutableMap.<String, Object>of("dim1", "x")
)
);
IngestMetadataQuery ingestMetadataQuery = Druids.newIngestMetadataQueryBuilder()
DataSourceMetadataQuery dataSourceMetadataQuery = Druids.newDataSourceMetadataQueryBuilder()
.dataSource("testing")
.build();
Map<String, Object> context = new MapMaker().makeMap();
context.put(Result.MISSING_SEGMENTS_KEY, Lists.newArrayList());
Iterable<Result<IngestMetadataResultValue>> results = Sequences.toList(
runner.run(ingestMetadataQuery, context),
Lists.<Result<IngestMetadataResultValue>>newArrayList()
Iterable<Result<DataSourceMetadataResultValue>> results = Sequences.toList(
runner.run(dataSourceMetadataQuery, context),
Lists.<Result<DataSourceMetadataResultValue>>newArrayList()
);
IngestMetadataResultValue val = results.iterator().next().getValue();
DataSourceMetadataResultValue val = results.iterator().next().getValue();
DateTime maxIngestedEventTime = val.getMaxIngestedEventTime();
Assert.assertEquals(timestamp, maxIngestedEventTime);

View File

@@ -28,8 +28,8 @@ import io.druid.query.QueryWatcher;
import io.druid.query.groupby.GroupByQuery;
import io.druid.query.groupby.GroupByQueryEngine;
import io.druid.query.groupby.GroupByQueryRunnerFactory;
import io.druid.query.ingestmetadata.IngestMetadataQuery;
import io.druid.query.ingestmetadata.IngestMetadataQueryRunnerFactory;
import io.druid.query.datasourcemetadata.DataSourceMetadataQuery;
import io.druid.query.datasourcemetadata.DataSourceMetadataQueryRunnerFactory;
import io.druid.query.metadata.SegmentMetadataQueryRunnerFactory;
import io.druid.query.metadata.metadata.SegmentMetadataQuery;
import io.druid.query.search.SearchQueryRunnerFactory;
@@ -59,7 +59,7 @@ public class QueryRunnerFactoryModule extends QueryToolChestModule
.put(GroupByQuery.class, GroupByQueryRunnerFactory.class)
.put(SelectQuery.class, SelectQueryRunnerFactory.class)
.put(TopNQuery.class, TopNQueryRunnerFactory.class)
.put(IngestMetadataQuery.class, IngestMetadataQueryRunnerFactory.class)
.put(DataSourceMetadataQuery.class, DataSourceMetadataQueryRunnerFactory.class)
.build();
@Override

View File

@@ -29,8 +29,8 @@ import io.druid.query.QueryToolChest;
import io.druid.query.groupby.GroupByQuery;
import io.druid.query.groupby.GroupByQueryConfig;
import io.druid.query.groupby.GroupByQueryQueryToolChest;
import io.druid.query.ingestmetadata.IngestMetadataQuery;
import io.druid.query.ingestmetadata.IngestMetadataQueryQueryToolChest;
import io.druid.query.datasourcemetadata.DataSourceMetadataQuery;
import io.druid.query.datasourcemetadata.DataSourceQueryQueryToolChest;
import io.druid.query.metadata.SegmentMetadataQueryQueryToolChest;
import io.druid.query.metadata.metadata.SegmentMetadataQuery;
import io.druid.query.search.SearchQueryQueryToolChest;
@@ -61,7 +61,7 @@ public class QueryToolChestModule implements Module
.put(GroupByQuery.class, GroupByQueryQueryToolChest.class)
.put(SelectQuery.class, SelectQueryQueryToolChest.class)
.put(TopNQuery.class, TopNQueryQueryToolChest.class)
.put(IngestMetadataQuery.class, IngestMetadataQueryQueryToolChest.class)
.put(DataSourceMetadataQuery.class, DataSourceQueryQueryToolChest.class)
.build();
@Override