review comments refactoring

nishantmonu51 2014-12-11 16:33:14 +05:30
parent 3763357f6e
commit 32b4f55b8a
9 changed files with 100 additions and 100 deletions

View File

@@ -29,7 +29,7 @@ import io.druid.query.filter.NoopDimFilter;
 import io.druid.query.filter.NotDimFilter;
 import io.druid.query.filter.OrDimFilter;
 import io.druid.query.filter.SelectorDimFilter;
-import io.druid.query.ingestmetadata.IngestMetadataQuery;
+import io.druid.query.datasourcemetadata.DataSourceMetadataQuery;
 import io.druid.query.metadata.metadata.ColumnIncluderator;
 import io.druid.query.metadata.metadata.SegmentMetadataQuery;
 import io.druid.query.search.SearchResultValue;
@@ -1102,88 +1102,88 @@ public class Druids
   }
   /**
-   * A Builder for IngestMetadataQuery.
+   * A Builder for DataSourceMetadataQuery.
    *
    * Required: dataSource() must be called before build()
    *
    * Usage example:
    * <pre><code>
-   *   IngestMetadataQueryBuilder query = new IngestMetadataQueryBuilder()
+   *   DataSourceMetadataQueryBuilder query = new DataSourceMetadataQueryBuilder()
    *       .dataSource("Example")
    *       .build();
    * </code></pre>
    *
-   * @see io.druid.query.ingestmetadata.IngestMetadataQuery
+   * @see io.druid.query.datasourcemetadata.DataSourceMetadataQuery
    */
-  public static class IngestMetadataQueryBuilder
+  public static class DataSourceMetadataQueryBuilder
   {
     private DataSource dataSource;
     private QuerySegmentSpec querySegmentSpec;
     private Map<String, Object> context;
-    public IngestMetadataQueryBuilder()
+    public DataSourceMetadataQueryBuilder()
     {
       dataSource = null;
       querySegmentSpec = null;
       context = null;
     }
-    public IngestMetadataQuery build()
+    public DataSourceMetadataQuery build()
    {
-      return new IngestMetadataQuery(
+      return new DataSourceMetadataQuery(
          dataSource,
          querySegmentSpec,
          context
      );
    }
-    public IngestMetadataQueryBuilder copy(IngestMetadataQueryBuilder builder)
+    public DataSourceMetadataQueryBuilder copy(DataSourceMetadataQueryBuilder builder)
    {
-      return new IngestMetadataQueryBuilder()
+      return new DataSourceMetadataQueryBuilder()
          .dataSource(builder.dataSource)
          .intervals(builder.querySegmentSpec)
          .context(builder.context);
    }
-    public IngestMetadataQueryBuilder dataSource(String ds)
+    public DataSourceMetadataQueryBuilder dataSource(String ds)
    {
      dataSource = new TableDataSource(ds);
      return this;
    }
-    public IngestMetadataQueryBuilder dataSource(DataSource ds)
+    public DataSourceMetadataQueryBuilder dataSource(DataSource ds)
    {
      dataSource = ds;
      return this;
    }
-    public IngestMetadataQueryBuilder intervals(QuerySegmentSpec q)
+    public DataSourceMetadataQueryBuilder intervals(QuerySegmentSpec q)
    {
      querySegmentSpec = q;
      return this;
    }
-    public IngestMetadataQueryBuilder intervals(String s)
+    public DataSourceMetadataQueryBuilder intervals(String s)
    {
      querySegmentSpec = new LegacySegmentSpec(s);
      return this;
    }
-    public IngestMetadataQueryBuilder intervals(List<Interval> l)
+    public DataSourceMetadataQueryBuilder intervals(List<Interval> l)
    {
      querySegmentSpec = new LegacySegmentSpec(l);
      return this;
    }
-    public IngestMetadataQueryBuilder context(Map<String, Object> c)
+    public DataSourceMetadataQueryBuilder context(Map<String, Object> c)
    {
      context = c;
      return this;
    }
   }
-  public static IngestMetadataQueryBuilder newIngestMetadataQueryBuilder()
+  public static DataSourceMetadataQueryBuilder newDataSourceMetadataQueryBuilder()
   {
-    return new IngestMetadataQueryBuilder();
+    return new DataSourceMetadataQueryBuilder();
   }
 }
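
Note (not part of the diff): a minimal usage sketch of the renamed builder, following the Javadoc example above and the serialization test later in this commit. The "example" datasource name is a placeholder, not taken from the diff.

    // Sketch only: constructs a dataSourceMetadata query via the renamed static factory.
    DataSourceMetadataQuery query = Druids.newDataSourceMetadataQueryBuilder()
                                          .dataSource("example")   // placeholder datasource name
                                          .intervals("2013/2014")  // optional; only dataSource() is required before build()
                                          .build();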

View File

@@ -23,7 +23,7 @@ import com.fasterxml.jackson.annotation.JsonSubTypes;
 import com.fasterxml.jackson.annotation.JsonTypeInfo;
 import com.metamx.common.guava.Sequence;
 import io.druid.query.groupby.GroupByQuery;
-import io.druid.query.ingestmetadata.IngestMetadataQuery;
+import io.druid.query.datasourcemetadata.DataSourceMetadataQuery;
 import io.druid.query.metadata.metadata.SegmentMetadataQuery;
 import io.druid.query.search.search.SearchQuery;
 import io.druid.query.select.SelectQuery;
@@ -46,7 +46,7 @@ import java.util.Map;
     @JsonSubTypes.Type(name = Query.SEGMENT_METADATA, value = SegmentMetadataQuery.class),
     @JsonSubTypes.Type(name = Query.SELECT, value = SelectQuery.class),
     @JsonSubTypes.Type(name = Query.TOPN, value = TopNQuery.class),
-    @JsonSubTypes.Type(name = Query.INGEST_METADATA, value = IngestMetadataQuery.class)
+    @JsonSubTypes.Type(name = Query.DATASOURCE_METADATA, value = DataSourceMetadataQuery.class)
 })
 public interface Query<T>
@@ -58,7 +58,7 @@ public interface Query<T>
   public static final String SEGMENT_METADATA = "segmentMetadata";
   public static final String SELECT = "select";
   public static final String TOPN = "topN";
-  public static final String INGEST_METADATA = "ingestMetadata";
+  public static final String DATASOURCE_METADATA = "dataSourceMetadata";
   public DataSource getDataSource();

View File

@@ -17,7 +17,7 @@
  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
  */
-package io.druid.query.ingestmetadata;
+package io.druid.query.datasourcemetadata;
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
@@ -39,7 +39,7 @@ import java.util.Map;
 /**
  */
-public class IngestMetadataQuery extends BaseQuery<Result<IngestMetadataResultValue>>
+public class DataSourceMetadataQuery extends BaseQuery<Result<DataSourceMetadataResultValue>>
 {
   public static final Interval MY_Y2K_INTERVAL = new Interval(
       new DateTime("0000-01-01"),
@@ -50,7 +50,7 @@ public class IngestMetadataQuery extends BaseQuery<Result<IngestMetadataResultValue>>
   @JsonCreator
-  public IngestMetadataQuery(
+  public DataSourceMetadataQuery(
       @JsonProperty("dataSource") DataSource dataSource,
       @JsonProperty("intervals") QuerySegmentSpec querySegmentSpec,
       @JsonProperty("context") Map<String, Object> context
@@ -73,13 +73,13 @@ public class IngestMetadataQuery extends BaseQuery<Result<IngestMetadataResultValue>>
   @Override
   public String getType()
   {
-    return Query.INGEST_METADATA;
+    return Query.DATASOURCE_METADATA;
   }
   @Override
-  public IngestMetadataQuery withOverriddenContext(Map<String, Object> contextOverrides)
+  public DataSourceMetadataQuery withOverriddenContext(Map<String, Object> contextOverrides)
   {
-    return new IngestMetadataQuery(
+    return new DataSourceMetadataQuery(
        getDataSource(),
        getQuerySegmentSpec(),
        computeOverridenContext(contextOverrides)
@@ -87,9 +87,9 @@ public class IngestMetadataQuery extends BaseQuery<Result<IngestMetadataResultValue>>
   }
   @Override
-  public IngestMetadataQuery withQuerySegmentSpec(QuerySegmentSpec spec)
+  public DataSourceMetadataQuery withQuerySegmentSpec(QuerySegmentSpec spec)
   {
-    return new IngestMetadataQuery(
+    return new DataSourceMetadataQuery(
        getDataSource(),
        spec,
        getContext()
@@ -97,38 +97,38 @@ public class IngestMetadataQuery extends BaseQuery<Result<IngestMetadataResultValue>>
   }
   @Override
-  public Query<Result<IngestMetadataResultValue>> withDataSource(DataSource dataSource)
+  public Query<Result<DataSourceMetadataResultValue>> withDataSource(DataSource dataSource)
   {
-    return new IngestMetadataQuery(
+    return new DataSourceMetadataQuery(
        dataSource,
        getQuerySegmentSpec(),
        getContext()
    );
   }
-  public Iterable<Result<IngestMetadataResultValue>> buildResult(DateTime timestamp, DateTime maxIngestedEventTime)
+  public Iterable<Result<DataSourceMetadataResultValue>> buildResult(DateTime timestamp, DateTime maxIngestedEventTime)
   {
-    List<Result<IngestMetadataResultValue>> results = Lists.newArrayList();
+    List<Result<DataSourceMetadataResultValue>> results = Lists.newArrayList();
     Map<String, Object> result = Maps.newHashMap();
     if (maxIngestedEventTime != null) {
       result.put(MAX_INGESTED_EVENT_TIME, maxIngestedEventTime);
     }
     if (!result.isEmpty()) {
-      results.add(new Result<>(timestamp, new IngestMetadataResultValue(result)));
+      results.add(new Result<>(timestamp, new DataSourceMetadataResultValue(result)));
     }
     return results;
   }
-  public Iterable<Result<IngestMetadataResultValue>> mergeResults(List<Result<IngestMetadataResultValue>> results)
+  public Iterable<Result<DataSourceMetadataResultValue>> mergeResults(List<Result<DataSourceMetadataResultValue>> results)
   {
     if (results == null || results.isEmpty()) {
       return Lists.newArrayList();
     }
     DateTime max = new DateTime(JodaUtils.MIN_INSTANT);
-    for (Result<IngestMetadataResultValue> result : results) {
+    for (Result<DataSourceMetadataResultValue> result : results) {
       DateTime currMaxIngestedEventTime = result.getValue().getMaxIngestedEventTime();
       if (currMaxIngestedEventTime != null && currMaxIngestedEventTime.isAfter(max)) {
         max = currMaxIngestedEventTime;
@@ -141,7 +141,7 @@ public class IngestMetadataQuery extends BaseQuery<Result<IngestMetadataResultValue>>
   @Override
   public String toString()
   {
-    return "IngestMetadataQuery{" +
+    return "DataSourceMetadataQuery{" +
        "dataSource='" + getDataSource() + '\'' +
        ", querySegmentSpec=" + getQuerySegmentSpec() +
        ", duration=" + getDuration() +

View File

@@ -17,7 +17,7 @@
  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
  */
-package io.druid.query.ingestmetadata;
+package io.druid.query.datasourcemetadata;
 import com.google.inject.Inject;
 import com.metamx.common.ISE;
@@ -39,27 +39,27 @@ import java.util.concurrent.ExecutorService;
 /**
  */
-public class IngestMetadataQueryRunnerFactory
-    implements QueryRunnerFactory<Result<IngestMetadataResultValue>, IngestMetadataQuery>
+public class DataSourceMetadataQueryRunnerFactory
+    implements QueryRunnerFactory<Result<DataSourceMetadataResultValue>, DataSourceMetadataQuery>
 {
-  private static final IngestMetadataQueryQueryToolChest toolChest = new IngestMetadataQueryQueryToolChest();
+  private static final DataSourceQueryQueryToolChest toolChest = new DataSourceQueryQueryToolChest();
   private final QueryWatcher queryWatcher;
   @Inject
-  public IngestMetadataQueryRunnerFactory(QueryWatcher queryWatcher)
+  public DataSourceMetadataQueryRunnerFactory(QueryWatcher queryWatcher)
   {
     this.queryWatcher = queryWatcher;
   }
   @Override
-  public QueryRunner<Result<IngestMetadataResultValue>> createRunner(final Segment segment)
+  public QueryRunner<Result<DataSourceMetadataResultValue>> createRunner(final Segment segment)
   {
-    return new IngestMetadataQueryRunner(segment);
+    return new DataSourceMetadataQueryRunner(segment);
   }
   @Override
-  public QueryRunner<Result<IngestMetadataResultValue>> mergeRunners(
-      ExecutorService queryExecutor, Iterable<QueryRunner<Result<IngestMetadataResultValue>>> queryRunners
+  public QueryRunner<Result<DataSourceMetadataResultValue>> mergeRunners(
+      ExecutorService queryExecutor, Iterable<QueryRunner<Result<DataSourceMetadataResultValue>>> queryRunners
   )
   {
     return new ChainedExecutionQueryRunner<>(
@@ -68,37 +68,37 @@ public class IngestMetadataQueryRunnerFactory
   }
   @Override
-  public QueryToolChest<Result<IngestMetadataResultValue>, IngestMetadataQuery> getToolchest()
+  public QueryToolChest<Result<DataSourceMetadataResultValue>, DataSourceMetadataQuery> getToolchest()
   {
     return toolChest;
   }
-  private static class IngestMetadataQueryRunner implements QueryRunner<Result<IngestMetadataResultValue>>
+  private static class DataSourceMetadataQueryRunner implements QueryRunner<Result<DataSourceMetadataResultValue>>
   {
     private final StorageAdapter adapter;
-    public IngestMetadataQueryRunner(Segment segment)
+    public DataSourceMetadataQueryRunner(Segment segment)
     {
       this.adapter = segment.asStorageAdapter();
     }
     @Override
-    public Sequence<Result<IngestMetadataResultValue>> run(
-        Query<Result<IngestMetadataResultValue>> input,
+    public Sequence<Result<DataSourceMetadataResultValue>> run(
+        Query<Result<DataSourceMetadataResultValue>> input,
         Map<String, Object> responseContext
     )
     {
-      if (!(input instanceof IngestMetadataQuery)) {
+      if (!(input instanceof DataSourceMetadataQuery)) {
-        throw new ISE("Got a [%s] which isn't a %s", input.getClass(), IngestMetadataQuery.class);
+        throw new ISE("Got a [%s] which isn't a %s", input.getClass().getCanonicalName(), DataSourceMetadataQuery.class);
       }
-      final IngestMetadataQuery legacyQuery = (IngestMetadataQuery) input;
+      final DataSourceMetadataQuery legacyQuery = (DataSourceMetadataQuery) input;
       return new BaseSequence<>(
-          new BaseSequence.IteratorMaker<Result<IngestMetadataResultValue>, Iterator<Result<IngestMetadataResultValue>>>()
+          new BaseSequence.IteratorMaker<Result<DataSourceMetadataResultValue>, Iterator<Result<DataSourceMetadataResultValue>>>()
          {
            @Override
-            public Iterator<Result<IngestMetadataResultValue>> make()
+            public Iterator<Result<DataSourceMetadataResultValue>> make()
            {
              if (adapter == null) {
                throw new ISE(
@@ -113,7 +113,7 @@ public class IngestMetadataQueryRunnerFactory
            }
            @Override
-            public void cleanup(Iterator<Result<IngestMetadataResultValue>> toClean)
+            public void cleanup(Iterator<Result<DataSourceMetadataResultValue>> toClean)
            {
            }

View File

@@ -17,7 +17,7 @@
  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
  */
-package io.druid.query.ingestmetadata;
+package io.druid.query.datasourcemetadata;
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonValue;
@@ -28,12 +28,12 @@ import java.util.Map;
 /**
  */
-public class IngestMetadataResultValue
+public class DataSourceMetadataResultValue
 {
   private final Object value;
   @JsonCreator
-  public IngestMetadataResultValue(
+  public DataSourceMetadataResultValue(
       Object value
   )
   {
@@ -49,7 +49,7 @@ public class IngestMetadataResultValue
   public DateTime getMaxIngestedEventTime()
   {
     if (value instanceof Map) {
-      return getDateTimeValue(((Map) value).get(IngestMetadataQuery.MAX_INGESTED_EVENT_TIME));
+      return getDateTimeValue(((Map) value).get(DataSourceMetadataQuery.MAX_INGESTED_EVENT_TIME));
     } else {
       return getDateTimeValue(value);
     }
@@ -65,7 +65,7 @@ public class IngestMetadataResultValue
       return false;
     }
-    IngestMetadataResultValue that = (IngestMetadataResultValue) o;
+    DataSourceMetadataResultValue that = (DataSourceMetadataResultValue) o;
     if (value != null ? !value.equals(that.value) : that.value != null) {
       return false;
@@ -83,7 +83,7 @@ public class IngestMetadataResultValue
   @Override
   public String toString()
   {
-    return "IngestMetadataResultValue{" +
+    return "DataSourceMetadataResultValue{" +
        "value=" + value +
        '}';
   }

View File

@@ -17,7 +17,7 @@
  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
  */
-package io.druid.query.ingestmetadata;
+package io.druid.query.datasourcemetadata;
 import com.fasterxml.jackson.core.type.TypeReference;
 import com.google.common.base.Function;
@@ -46,15 +46,15 @@ import java.util.Map;
 /**
  */
-public class IngestMetadataQueryQueryToolChest
-    extends QueryToolChest<Result<IngestMetadataResultValue>, IngestMetadataQuery>
+public class DataSourceQueryQueryToolChest
+    extends QueryToolChest<Result<DataSourceMetadataResultValue>, DataSourceMetadataQuery>
 {
-  private static final TypeReference<Result<IngestMetadataResultValue>> TYPE_REFERENCE = new TypeReference<Result<IngestMetadataResultValue>>()
+  private static final TypeReference<Result<DataSourceMetadataResultValue>> TYPE_REFERENCE = new TypeReference<Result<DataSourceMetadataResultValue>>()
   {
   };
   @Override
-  public <T extends LogicalSegment> List<T> filterSegments(IngestMetadataQuery query, List<T> segments)
+  public <T extends LogicalSegment> List<T> filterSegments(DataSourceMetadataQuery query, List<T> segments)
   {
     if (segments.size() <= 1) {
       return segments;
@@ -80,25 +80,25 @@ public class IngestMetadataQueryQueryToolChest
   }
   @Override
-  public QueryRunner<Result<IngestMetadataResultValue>> mergeResults(
-      final QueryRunner<Result<IngestMetadataResultValue>> runner
+  public QueryRunner<Result<DataSourceMetadataResultValue>> mergeResults(
+      final QueryRunner<Result<DataSourceMetadataResultValue>> runner
   )
   {
-    return new BySegmentSkippingQueryRunner<Result<IngestMetadataResultValue>>(runner)
+    return new BySegmentSkippingQueryRunner<Result<DataSourceMetadataResultValue>>(runner)
     {
       @Override
-      protected Sequence<Result<IngestMetadataResultValue>> doRun(
-          QueryRunner<Result<IngestMetadataResultValue>> baseRunner,
-          Query<Result<IngestMetadataResultValue>> input,
+      protected Sequence<Result<DataSourceMetadataResultValue>> doRun(
+          QueryRunner<Result<DataSourceMetadataResultValue>> baseRunner,
+          Query<Result<DataSourceMetadataResultValue>> input,
          Map<String, Object> context
      )
      {
-        IngestMetadataQuery query = (IngestMetadataQuery) input;
+        DataSourceMetadataQuery query = (DataSourceMetadataQuery) input;
        return Sequences.simple(
            query.mergeResults(
                Sequences.toList(
                    baseRunner.run(query, context),
-                    Lists.<Result<IngestMetadataResultValue>>newArrayList()
+                    Lists.<Result<DataSourceMetadataResultValue>>newArrayList()
                )
            )
        );
@@ -107,19 +107,19 @@ public class IngestMetadataQueryQueryToolChest
   }
   @Override
-  public Sequence<Result<IngestMetadataResultValue>> mergeSequences(Sequence<Sequence<Result<IngestMetadataResultValue>>> seqOfSequences)
+  public Sequence<Result<DataSourceMetadataResultValue>> mergeSequences(Sequence<Sequence<Result<DataSourceMetadataResultValue>>> seqOfSequences)
   {
     return new OrderedMergeSequence<>(getOrdering(), seqOfSequences);
   }
   @Override
-  public Sequence<Result<IngestMetadataResultValue>> mergeSequencesUnordered(Sequence<Sequence<Result<IngestMetadataResultValue>>> seqOfSequences)
+  public Sequence<Result<DataSourceMetadataResultValue>> mergeSequencesUnordered(Sequence<Sequence<Result<DataSourceMetadataResultValue>>> seqOfSequences)
   {
     return new MergeSequence<>(getOrdering(), seqOfSequences);
   }
   @Override
-  public ServiceMetricEvent.Builder makeMetricBuilder(IngestMetadataQuery query)
+  public ServiceMetricEvent.Builder makeMetricBuilder(DataSourceMetadataQuery query)
   {
     return new ServiceMetricEvent.Builder()
        .setUser2(DataSourceUtil.getMetricName(query.getDataSource()))
@@ -128,26 +128,26 @@ public class IngestMetadataQueryQueryToolChest
   }
   @Override
-  public Function<Result<IngestMetadataResultValue>, Result<IngestMetadataResultValue>> makePreComputeManipulatorFn(
-      IngestMetadataQuery query, MetricManipulationFn fn
+  public Function<Result<DataSourceMetadataResultValue>, Result<DataSourceMetadataResultValue>> makePreComputeManipulatorFn(
+      DataSourceMetadataQuery query, MetricManipulationFn fn
   )
   {
     return Functions.identity();
   }
   @Override
-  public TypeReference<Result<IngestMetadataResultValue>> getResultTypeReference()
+  public TypeReference<Result<DataSourceMetadataResultValue>> getResultTypeReference()
   {
     return TYPE_REFERENCE;
   }
   @Override
-  public CacheStrategy getCacheStrategy(IngestMetadataQuery query)
+  public CacheStrategy getCacheStrategy(DataSourceMetadataQuery query)
   {
     return null;
   }
-  public Ordering<Result<IngestMetadataResultValue>> getOrdering()
+  public Ordering<Result<DataSourceMetadataResultValue>> getOrdering()
   {
     return Ordering.natural();
   }

View File

@@ -19,7 +19,7 @@
  * under the Druid Corporate Contributor License Agreement.
  */
-package io.druid.query.ingestmetadata;
+package io.druid.query.datasourcemetadata;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.collect.ImmutableList;
@@ -50,14 +50,14 @@ import java.util.Arrays;
 import java.util.List;
 import java.util.Map;
-public class IngestMetadataQueryTest
+public class DataSourceMetadataQueryTest
 {
   private static final ObjectMapper jsonMapper = new DefaultObjectMapper();
   @Test
   public void testQuerySerialization() throws IOException
   {
-    Query query = Druids.newIngestMetadataQueryBuilder()
+    Query query = Druids.newDataSourceMetadataQueryBuilder()
                        .dataSource("testing")
                        .build();
@@ -70,7 +70,7 @@ public class IngestMetadataQueryTest
   @Test
   public void testContextSerde() throws Exception
   {
-    final IngestMetadataQuery query = Druids.newIngestMetadataQueryBuilder()
+    final DataSourceMetadataQuery query = Druids.newDataSourceMetadataQueryBuilder()
                                             .dataSource("foo")
                                             .intervals("2013/2014")
                                             .context(
@@ -109,7 +109,7 @@ public class IngestMetadataQueryTest
   {
     final IncrementalIndex rtIndex = TestIndex.getIncrementalTestIndex(false);
     final QueryRunner runner = QueryRunnerTestHelper.makeQueryRunner(
-        (QueryRunnerFactory) new IngestMetadataQueryRunnerFactory(
+        (QueryRunnerFactory) new DataSourceMetadataQueryRunnerFactory(
            QueryRunnerTestHelper.NOOP_QUERYWATCHER
        ), new IncrementalIndexSegment(rtIndex, "test")
    );
@@ -121,16 +121,16 @@ public class IngestMetadataQueryTest
            ImmutableMap.<String, Object>of("dim1", "x")
        )
    );
-    IngestMetadataQuery ingestMetadataQuery = Druids.newIngestMetadataQueryBuilder()
+    DataSourceMetadataQuery dataSourceMetadataQuery = Druids.newDataSourceMetadataQueryBuilder()
                                                     .dataSource("testing")
                                                     .build();
     Map<String, Object> context = new MapMaker().makeMap();
     context.put(Result.MISSING_SEGMENTS_KEY, Lists.newArrayList());
-    Iterable<Result<IngestMetadataResultValue>> results = Sequences.toList(
-        runner.run(ingestMetadataQuery, context),
-        Lists.<Result<IngestMetadataResultValue>>newArrayList()
+    Iterable<Result<DataSourceMetadataResultValue>> results = Sequences.toList(
+        runner.run(dataSourceMetadataQuery, context),
+        Lists.<Result<DataSourceMetadataResultValue>>newArrayList()
    );
-    IngestMetadataResultValue val = results.iterator().next().getValue();
+    DataSourceMetadataResultValue val = results.iterator().next().getValue();
     DateTime maxIngestedEventTime = val.getMaxIngestedEventTime();
     Assert.assertEquals(timestamp, maxIngestedEventTime);

View File

@@ -28,8 +28,8 @@ import io.druid.query.QueryWatcher;
 import io.druid.query.groupby.GroupByQuery;
 import io.druid.query.groupby.GroupByQueryEngine;
 import io.druid.query.groupby.GroupByQueryRunnerFactory;
-import io.druid.query.ingestmetadata.IngestMetadataQuery;
-import io.druid.query.ingestmetadata.IngestMetadataQueryRunnerFactory;
+import io.druid.query.datasourcemetadata.DataSourceMetadataQuery;
+import io.druid.query.datasourcemetadata.DataSourceMetadataQueryRunnerFactory;
 import io.druid.query.metadata.SegmentMetadataQueryRunnerFactory;
 import io.druid.query.metadata.metadata.SegmentMetadataQuery;
 import io.druid.query.search.SearchQueryRunnerFactory;
@@ -59,7 +59,7 @@ public class QueryRunnerFactoryModule extends QueryToolChestModule
          .put(GroupByQuery.class, GroupByQueryRunnerFactory.class)
          .put(SelectQuery.class, SelectQueryRunnerFactory.class)
          .put(TopNQuery.class, TopNQueryRunnerFactory.class)
-          .put(IngestMetadataQuery.class, IngestMetadataQueryRunnerFactory.class)
+          .put(DataSourceMetadataQuery.class, DataSourceMetadataQueryRunnerFactory.class)
          .build();
   @Override

View File

@@ -29,8 +29,8 @@ import io.druid.query.QueryToolChest;
 import io.druid.query.groupby.GroupByQuery;
 import io.druid.query.groupby.GroupByQueryConfig;
 import io.druid.query.groupby.GroupByQueryQueryToolChest;
-import io.druid.query.ingestmetadata.IngestMetadataQuery;
-import io.druid.query.ingestmetadata.IngestMetadataQueryQueryToolChest;
+import io.druid.query.datasourcemetadata.DataSourceMetadataQuery;
+import io.druid.query.datasourcemetadata.DataSourceQueryQueryToolChest;
 import io.druid.query.metadata.SegmentMetadataQueryQueryToolChest;
 import io.druid.query.metadata.metadata.SegmentMetadataQuery;
 import io.druid.query.search.SearchQueryQueryToolChest;
@@ -61,7 +61,7 @@ public class QueryToolChestModule implements Module
          .put(GroupByQuery.class, GroupByQueryQueryToolChest.class)
          .put(SelectQuery.class, SelectQueryQueryToolChest.class)
          .put(TopNQuery.class, TopNQueryQueryToolChest.class)
-          .put(IngestMetadataQuery.class, IngestMetadataQueryQueryToolChest.class)
+          .put(DataSourceMetadataQuery.class, DataSourceQueryQueryToolChest.class)
          .build();
   @Override