mirror of https://github.com/apache/druid.git
Merge branch 'master' of https://github.com/metamx/druid into kit-building
commit 16455fe99b
@@ -18,8 +18,7 @@
   ~ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
   -->
 
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
     <modelVersion>4.0.0</modelVersion>
     <groupId>com.metamx.druid</groupId>
     <artifactId>druid-client</artifactId>
@@ -29,7 +28,7 @@
     <parent>
         <groupId>com.metamx</groupId>
         <artifactId>druid</artifactId>
-        <version>0.4.1-SNAPSHOT</version>
+        <version>0.4.4-SNAPSHOT</version>
     </parent>
 
     <dependencies>
@@ -121,29 +121,39 @@ public class Initialization
     if (tmp_props.getProperty(zkHostsProperty) != null) {
       final ConfigurationObjectFactory factory = Config.createFactory(tmp_props);
 
-      Lifecycle lifecycle = new Lifecycle();
+      ZkPathsConfig config;
       try {
-        final ZkPathsConfig config = factory.build(ZkPathsConfig.class);
-        CuratorFramework curator = makeCuratorFramework(factory.build(CuratorConfig.class), lifecycle);
-
-        lifecycle.start();
-
-        final Stat stat = curator.checkExists().forPath(config.getPropertiesPath());
-        if (stat != null) {
-          final byte[] data = curator.getData().forPath(config.getPropertiesPath());
-          zkProps.load(new InputStreamReader(new ByteArrayInputStream(data), Charsets.UTF_8));
-        }
-
-        // log properties from zk
-        for (String prop : zkProps.stringPropertyNames()) {
-          log.info("Loaded(zk) Property[%s] as [%s]", prop, zkProps.getProperty(prop));
-        }
+        config = factory.build(ZkPathsConfig.class);
       }
-      catch (Exception e) {
-        throw Throwables.propagate(e);
+      catch (IllegalArgumentException e) {
+        log.warn(e, "Unable to build ZkPathsConfig. Cannot load properties from ZK.");
+        config = null;
       }
-      finally {
-        lifecycle.stop();
+
+      if (config != null) {
+        Lifecycle lifecycle = new Lifecycle();
+        try {
+          CuratorFramework curator = makeCuratorFramework(factory.build(CuratorConfig.class), lifecycle);
+
+          lifecycle.start();
+
+          final Stat stat = curator.checkExists().forPath(config.getPropertiesPath());
+          if (stat != null) {
+            final byte[] data = curator.getData().forPath(config.getPropertiesPath());
+            zkProps.load(new InputStreamReader(new ByteArrayInputStream(data), Charsets.UTF_8));
+          }
+
+          // log properties from zk
+          for (String prop : zkProps.stringPropertyNames()) {
+            log.info("Loaded(zk) Property[%s] as [%s]", prop, zkProps.getProperty(prop));
+          }
+        }
+        catch (Exception e) {
+          throw Throwables.propagate(e);
+        }
+        finally {
+          lifecycle.stop();
+        }
       }
     } else {
       log.warn("property[%s] not set, skipping ZK-specified properties.", zkHostsProperty);
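Note on the shape of this change: building ZkPathsConfig is separated from reading ZooKeeper, so a config that cannot be built now logs a warning and skips the ZK property load instead of propagating, and the curator lifecycle is started only when a usable config exists. A minimal sketch of the resulting control flow, using the names from the hunk above:

    ZkPathsConfig config;
    try {
      config = factory.build(ZkPathsConfig.class);    // may throw IllegalArgumentException
    }
    catch (IllegalArgumentException e) {
      log.warn(e, "Unable to build ZkPathsConfig. Cannot load properties from ZK.");
      config = null;                                  // degrade gracefully: no ZK-sourced properties
    }

    if (config != null) {
      Lifecycle lifecycle = new Lifecycle();
      try {
        // start curator, read config.getPropertiesPath(), load the bytes into zkProps
        lifecycle.start();
      }
      finally {
        lifecycle.stop();                             // torn down even when the ZK read fails
      }
    }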
@@ -21,9 +21,12 @@ package com.metamx.druid.query.group;
 
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.google.common.base.Function;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Lists;
 import com.metamx.common.ISE;
 import com.metamx.common.guava.Sequence;
 import com.metamx.druid.BaseQuery;
 import com.metamx.druid.Query;
 import com.metamx.druid.QueryGranularity;
@@ -34,6 +37,9 @@ import com.metamx.druid.query.Queries;
 import com.metamx.druid.query.dimension.DefaultDimensionSpec;
 import com.metamx.druid.query.dimension.DimensionSpec;
 import com.metamx.druid.query.filter.DimFilter;
+import com.metamx.druid.query.group.limit.DefaultLimitSpec;
+import com.metamx.druid.query.group.limit.LimitSpec;
+import com.metamx.druid.query.group.limit.OrderByColumnSpec;
 import com.metamx.druid.query.segment.LegacySegmentSpec;
 import com.metamx.druid.query.segment.QuerySegmentSpec;
@@ -49,16 +55,20 @@ public class GroupByQuery extends BaseQuery<Row>
     return new Builder();
   }
 
+  private final LimitSpec limitSpec;
   private final DimFilter dimFilter;
   private final QueryGranularity granularity;
   private final List<DimensionSpec> dimensions;
   private final List<AggregatorFactory> aggregatorSpecs;
   private final List<PostAggregator> postAggregatorSpecs;
+
+  private final Function<Sequence<Row>, Sequence<Row>> orderByLimitFn;
 
   @JsonCreator
   public GroupByQuery(
       @JsonProperty("dataSource") String dataSource,
       @JsonProperty("intervals") QuerySegmentSpec querySegmentSpec,
+      @JsonProperty("limitSpec") LimitSpec limitSpec,
       @JsonProperty("filter") DimFilter dimFilter,
       @JsonProperty("granularity") QueryGranularity granularity,
       @JsonProperty("dimensions") List<DimensionSpec> dimensions,
@@ -68,6 +78,7 @@ public class GroupByQuery extends BaseQuery<Row>
   )
   {
     super(dataSource, querySegmentSpec, context);
+    this.limitSpec = (limitSpec == null) ? new DefaultLimitSpec() : limitSpec;
     this.dimFilter = dimFilter;
     this.granularity = granularity;
     this.dimensions = dimensions == null ? ImmutableList.<DimensionSpec>of() : dimensions;
@@ -77,6 +88,14 @@ public class GroupByQuery extends BaseQuery<Row>
     Preconditions.checkNotNull(this.granularity, "Must specify a granularity");
     Preconditions.checkNotNull(this.aggregatorSpecs, "Must specify at least one aggregator");
     Queries.verifyAggregations(this.aggregatorSpecs, this.postAggregatorSpecs);
+
+    orderByLimitFn = this.limitSpec.build(this.dimensions, this.aggregatorSpecs, this.postAggregatorSpecs);
   }
 
+  @JsonProperty
+  public LimitSpec getLimitSpec()
+  {
+    return limitSpec;
+  }
+
   @JsonProperty("filter")
@@ -121,12 +140,18 @@ public class GroupByQuery extends BaseQuery<Row>
     return Query.GROUP_BY;
   }
 
+  public Sequence<Row> applyLimit(Sequence<Row> results)
+  {
+    return orderByLimitFn.apply(results);
+  }
+
   @Override
   public GroupByQuery withOverriddenContext(Map<String, String> contextOverride)
   {
     return new GroupByQuery(
         getDataSource(),
         getQuerySegmentSpec(),
+        limitSpec,
         dimFilter,
         granularity,
         dimensions,
@@ -142,6 +167,7 @@ public class GroupByQuery extends BaseQuery<Row>
     return new GroupByQuery(
         getDataSource(),
         spec,
+        limitSpec,
         dimFilter,
         granularity,
         dimensions,
@@ -162,12 +188,17 @@ public class GroupByQuery extends BaseQuery<Row>
     private List<PostAggregator> postAggregatorSpecs;
     private Map<String, String> context;
 
+    private LimitSpec limitSpec = null;
+    private List<OrderByColumnSpec> orderByColumnSpecs = Lists.newArrayList();
+    private int limit = Integer.MAX_VALUE;
+
     private Builder() {}
 
     private Builder(Builder builder)
     {
       dataSource = builder.dataSource;
       querySegmentSpec = builder.querySegmentSpec;
+      limitSpec = builder.limitSpec;
       dimFilter = builder.dimFilter;
       granularity = builder.granularity;
       dimensions = builder.dimensions;
@@ -187,6 +218,56 @@ public class GroupByQuery extends BaseQuery<Row>
       return setQuerySegmentSpec(new LegacySegmentSpec(interval));
     }
 
+    public Builder limit(int limit)
+    {
+      ensureExplicitLimitNotSet();
+      this.limit = limit;
+      return this;
+    }
+
+    public Builder addOrderByColumn(String dimension)
+    {
+      return addOrderByColumn(dimension, (OrderByColumnSpec.Direction) null);
+    }
+
+    public Builder addOrderByColumn(String dimension, String direction)
+    {
+      return addOrderByColumn(dimension, OrderByColumnSpec.determineDirection(direction));
+    }
+
+    public Builder addOrderByColumn(String dimension, OrderByColumnSpec.Direction direction)
+    {
+      return addOrderByColumn(new OrderByColumnSpec(dimension, direction));
+    }
+
+    public Builder addOrderByColumn(OrderByColumnSpec columnSpec)
+    {
+      ensureExplicitLimitNotSet();
+      this.orderByColumnSpecs.add(columnSpec);
+      return this;
+    }
+
+    public Builder setLimitSpec(LimitSpec limitSpec)
+    {
+      ensureFluentLimitsNotSet();
+      this.limitSpec = limitSpec;
+      return this;
+    }
+
+    private void ensureExplicitLimitNotSet()
+    {
+      if (limitSpec != null) {
+        throw new ISE("Ambiguous build, limitSpec[%s] already set", limitSpec);
+      }
+    }
+
+    private void ensureFluentLimitsNotSet()
+    {
+      if (! (limit == Integer.MAX_VALUE && orderByColumnSpecs.isEmpty()) ) {
+        throw new ISE("Ambiguous build, limit[%s] or columnSpecs[%s] already set.", limit, orderByColumnSpecs);
+      }
+    }
+
     public Builder setQuerySegmentSpec(QuerySegmentSpec querySegmentSpec)
     {
       this.querySegmentSpec = querySegmentSpec;
@@ -276,9 +357,18 @@ public class GroupByQuery extends BaseQuery<Row>
 
     public GroupByQuery build()
     {
+      final LimitSpec theLimitSpec;
+      if (limitSpec == null) {
+        theLimitSpec = new DefaultLimitSpec(orderByColumnSpecs, limit);
+      }
+      else {
+        theLimitSpec = limitSpec;
+      }
+
       return new GroupByQuery(
           dataSource,
           querySegmentSpec,
+          theLimitSpec,
           dimFilter,
           granularity,
           dimensions,
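Note: the Builder now offers two mutually exclusive ways to express ordering and limiting: the fluent addOrderByColumn(...)/limit(...) calls, which build() folds into a DefaultLimitSpec, or an explicit setLimitSpec(...). A sketch of the fluent style, reusing names from the tests later in this commit (the data source string is a placeholder):

    GroupByQuery query = GroupByQuery
        .builder()
        .setDataSource("someDataSource")                                  // placeholder
        .setInterval("2011-04-02/2011-04-04")
        .setDimensions(Lists.<DimensionSpec>newArrayList(new DefaultDimensionSpec("quality", "alias")))
        .setAggregatorSpecs(Arrays.<AggregatorFactory>asList(new LongSumAggregatorFactory("idx", "index")))
        .addOrderByColumn("idx", OrderByColumnSpec.Direction.DESCENDING)  // fluent ordering
        .limit(10)                                                        // becomes DefaultLimitSpec(orderBy, 10)
        .setGranularity(new PeriodGranularity(new Period("P1M"), null, null))
        .build();

Mixing the styles is rejected: calling setLimitSpec(...) after limit(...)/addOrderByColumn(...), or the other way around, fails fast with an ISE("Ambiguous build, ...").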
@@ -21,10 +21,10 @@ package com.metamx.druid.query.group;
 
 import com.fasterxml.jackson.core.type.TypeReference;
 import com.google.common.base.Function;
-import com.google.common.base.Functions;
 import com.google.common.base.Joiner;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
 import com.metamx.common.ISE;
 import com.metamx.common.guava.Accumulator;
 import com.metamx.common.guava.ConcatSequence;
@@ -44,7 +44,6 @@ import com.metamx.druid.query.QueryToolChest;
 import com.metamx.druid.query.dimension.DimensionSpec;
 import com.metamx.druid.utils.PropUtils;
 import com.metamx.emitter.service.ServiceMetricEvent;
-
 import org.joda.time.Interval;
 import org.joda.time.Minutes;
 
@@ -57,8 +56,9 @@ import java.util.Properties;
  */
 public class GroupByQueryQueryToolChest extends QueryToolChest<Row, GroupByQuery>
 {
-  private static final TypeReference<Row> TYPE_REFERENCE = new TypeReference<Row>(){};
+  private static final TypeReference<Row> TYPE_REFERENCE = new TypeReference<Row>()
+  {
+  };
   private static final String GROUP_BY_MERGE_KEY = "groupByMerge";
   private static final Map<String, String> NO_MERGE_CONTEXT = ImmutableMap.of(GROUP_BY_MERGE_KEY, "false");
 
@@ -81,22 +81,21 @@ public class GroupByQueryQueryToolChest extends QueryToolChest<Row, GroupByQuery
       {
         if (Boolean.valueOf(input.getContextValue(GROUP_BY_MERGE_KEY, "true"))) {
           return mergeGroupByResults(((GroupByQuery) input).withOverriddenContext(NO_MERGE_CONTEXT), runner);
-        }
-        else {
+        } else {
           return runner.run(input);
         }
       }
     };
   }
 
-  private Sequence<Row> mergeGroupByResults(GroupByQuery query, QueryRunner<Row> runner)
+  private Sequence<Row> mergeGroupByResults(final GroupByQuery query, QueryRunner<Row> runner)
   {
     final QueryGranularity gran = query.getGranularity();
     final long timeStart = query.getIntervals().get(0).getStartMillis();
 
     // use gran.iterable instead of gran.truncate so that
     // AllGranularity returns timeStart instead of Long.MIN_VALUE
-    final long granTimeStart = gran.iterable(timeStart, timeStart+1).iterator().next();
+    final long granTimeStart = gran.iterable(timeStart, timeStart + 1).iterator().next();
 
     final List<AggregatorFactory> aggs = Lists.transform(
         query.getAggregatorSpecs(),
@@ -144,7 +143,7 @@ public class GroupByQueryQueryToolChest extends QueryToolChest<Row, GroupByQuery
     );
 
     // convert millis back to timestamp according to granularity to preserve time zone information
-    return Sequences.map(
+    Sequence<Row> retVal = Sequences.map(
         Sequences.simple(index.iterableWithPostAggregations(query.getPostAggregatorSpecs())),
         new Function<Row, Row>()
         {
@@ -156,6 +155,8 @@ public class GroupByQueryQueryToolChest extends QueryToolChest<Row, GroupByQuery
           }
         }
     );
+
+    return query.applyLimit(retVal);
   }
 
   @Override
@@ -183,9 +184,24 @@ public class GroupByQueryQueryToolChest extends QueryToolChest<Row, GroupByQuery
   }
 
   @Override
-  public Function<Row, Row> makeMetricManipulatorFn(GroupByQuery query, MetricManipulationFn fn)
+  public Function<Row, Row> makeMetricManipulatorFn(final GroupByQuery query, final MetricManipulationFn fn)
   {
-    return Functions.identity();
+    return new Function<Row, Row>()
+    {
+      @Override
+      public Row apply(Row input)
+      {
+        if (input instanceof MapBasedRow) {
+          final MapBasedRow inputRow = (MapBasedRow) input;
+          final Map<String, Object> values = Maps.newHashMap(((MapBasedRow) input).getEvent());
+          for (AggregatorFactory agg : query.getAggregatorSpecs()) {
+            values.put(agg.getName(), fn.manipulate(agg, inputRow.getEvent().get(agg.getName())));
+          }
+          return new MapBasedRow(inputRow.getTimestamp(), values);
+        }
+        return input;
+      }
+    };
   }
 
   @Override
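Note: two behavioral changes land in this file. Merged results are now funneled through query.applyLimit(...), so ordering and limiting happen once, after partial results are merged and post-aggregated; and makeMetricManipulatorFn rewrites each aggregator column of a MapBasedRow with the supplied MetricManipulationFn instead of returning the identity function. The tail of the merge, in effect (timestampMappingFn is an assumed name for the Function<Row, Row> in the hunk above):

    Sequence<Row> retVal = Sequences.map(
        Sequences.simple(index.iterableWithPostAggregations(query.getPostAggregatorSpecs())),
        timestampMappingFn
    );
    return query.applyLimit(retVal);  // sort and truncate via the query's LimitSpec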
@@ -0,0 +1,184 @@
/*
 * Druid - a distributed column store.
 * Copyright (C) 2012 Metamarkets Group Inc.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version 2
 * of the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 */

package com.metamx.druid.query.group.limit;

import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.Function;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Ordering;
import com.google.common.primitives.Longs;
import com.metamx.common.ISE;
import com.metamx.common.guava.Sequence;
import com.metamx.common.guava.Sequences;
import com.metamx.druid.aggregation.AggregatorFactory;
import com.metamx.druid.aggregation.post.PostAggregator;
import com.metamx.druid.input.Row;
import com.metamx.druid.query.dimension.DimensionSpec;

import java.util.Comparator;
import java.util.List;
import java.util.Map;

/**
 */
public class DefaultLimitSpec implements LimitSpec
{
  private final List<OrderByColumnSpec> orderBy;
  private final int limit;

  @JsonCreator
  public DefaultLimitSpec(
      @JsonProperty("orderBy") List<OrderByColumnSpec> orderBy,
      @JsonProperty("limit") int limit
  )
  {
    this.orderBy = (orderBy == null) ? ImmutableList.<OrderByColumnSpec>of() : orderBy;
    this.limit = limit;

    Preconditions.checkState(limit > 0, "limit[%s] must be >0", limit);
  }

  public DefaultLimitSpec()
  {
    this.orderBy = Lists.newArrayList();
    this.limit = 0;
  }

  @JsonProperty
  public List<OrderByColumnSpec> getOrderBy()
  {
    return orderBy;
  }

  @JsonProperty
  public int getLimit()
  {
    return limit;
  }

  @Override
  public Function<Sequence<Row>, Sequence<Row>> build(
      List<DimensionSpec> dimensions, List<AggregatorFactory> aggs, List<PostAggregator> postAggs
  )
  {
    // Materialize the Comparator first for fast-fail error checking.
    final Comparator<Row> comparator = makeComparator(dimensions, aggs, postAggs);

    return new Function<Sequence<Row>, Sequence<Row>>()
    {
      @Override
      public Sequence<Row> apply(Sequence<Row> input)
      {
        return Sequences.limit(Sequences.sort(input, comparator), limit);
      }
    };
  }

  private Comparator<Row> makeComparator(
      List<DimensionSpec> dimensions, List<AggregatorFactory> aggs, List<PostAggregator> postAggs
  )
  {
    Ordering<Row> ordering = new Ordering<Row>()
    {
      @Override
      public int compare(Row left, Row right)
      {
        return Longs.compare(left.getTimestampFromEpoch(), right.getTimestampFromEpoch());
      }
    };

    Map<String, Ordering<Row>> possibleOrderings = Maps.newTreeMap(String.CASE_INSENSITIVE_ORDER);
    for (DimensionSpec spec : dimensions) {
      final String dimension = spec.getOutputName();
      possibleOrderings.put(dimension, dimensionOrdering(dimension));
    }

    for (final AggregatorFactory agg : aggs) {
      final String column = agg.getName();
      possibleOrderings.put(column, metricOrdering(column, agg.getComparator()));
    }

    for (PostAggregator postAgg : postAggs) {
      final String column = postAgg.getName();
      possibleOrderings.put(column, metricOrdering(column, postAgg.getComparator()));
    }

    for (OrderByColumnSpec columnSpec : orderBy) {
      Ordering<Row> nextOrdering = possibleOrderings.get(columnSpec.getDimension());

      if (nextOrdering == null) {
        throw new ISE("Unknown column in order clause[%s]", columnSpec);
      }

      switch (columnSpec.getDirection()) {
        case DESCENDING:
          nextOrdering = nextOrdering.reverse();
      }

      ordering = ordering.compound(nextOrdering);
    }

    return ordering;
  }

  private Ordering<Row> metricOrdering(final String column, final Comparator comparator)
  {
    return new Ordering<Row>()
    {
      @SuppressWarnings("unchecked")
      @Override
      public int compare(Row left, Row right)
      {
        return comparator.compare(left.getFloatMetric(column), right.getFloatMetric(column));
      }
    };
  }

  private Ordering<Row> dimensionOrdering(final String dimension)
  {
    return Ordering.natural()
                   .nullsFirst()
                   .onResultOf(
                       new Function<Row, String>()
                       {
                         @Override
                         public String apply(Row input)
                         {
                           // Multi-value dimensions have all been flattened at this point;
                           final List<String> dimList = input.getDimension(dimension);
                           return dimList.isEmpty() ? null : dimList.get(0);
                         }
                       }
                   );
  }

  @Override
  public String toString()
  {
    return "DefaultLimitSpec{" +
           "orderBy='" + orderBy + '\'' +
           ", limit=" + limit +
           '}';
  }
}
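Note: build() materializes the comparator eagerly so that unknown orderBy columns fail fast, then sorts the whole sequence and truncates it. The base ordering is the row timestamp; each orderBy column compounds an ordering looked up case-insensitively among dimension output names, aggregator names, and post-aggregator names (metric columns compare via getFloatMetric, so metric ordering is float-valued). A hedged sketch, with dimensions/aggs/postAggs/rows assumed to be in scope:

    LimitSpec spec = new DefaultLimitSpec(
        ImmutableList.of(new OrderByColumnSpec("idx", OrderByColumnSpec.Direction.DESCENDING)),
        10
    );

    // Throws ISE("Unknown column in order clause[...]") right here if "idx" names
    // neither a dimension, an aggregator, nor a post-aggregator.
    Function<Sequence<Row>, Sequence<Row>> fn = spec.build(dimensions, aggs, postAggs);

    Sequence<Row> top10 = fn.apply(rows);  // Sequences.limit(Sequences.sort(rows, comparator), 10)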
@@ -0,0 +1,42 @@
/*
 * Druid - a distributed column store.
 * Copyright (C) 2012 Metamarkets Group Inc.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version 2
 * of the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 */

package com.metamx.druid.query.group.limit;

import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.google.common.base.Function;
import com.metamx.common.guava.Sequence;
import com.metamx.druid.aggregation.AggregatorFactory;
import com.metamx.druid.aggregation.post.PostAggregator;
import com.metamx.druid.input.Row;
import com.metamx.druid.query.dimension.DimensionSpec;

import java.util.List;

/**
 */
@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = NoopLimitSpec.class)
@JsonSubTypes(value = {
    @JsonSubTypes.Type(name = "default", value = DefaultLimitSpec.class)
})
public interface LimitSpec
{
  public Function<Sequence<Row>, Sequence<Row>> build(List<DimensionSpec> dimensions, List<AggregatorFactory> aggs, List<PostAggregator> postAggs);
}
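Note: with this @JsonTypeInfo wiring, a query's "limitSpec" is resolved by its "type" field and falls back to NoopLimitSpec when no type is given; OrderByColumnSpec.create (below in this commit) additionally accepts either a bare string or a {dimension, direction} map inside "orderBy". The wire format sketched here is inferred from the annotations, not quoted from documentation; objectMapper is an assumed, suitably configured Jackson ObjectMapper:

    String json = "{\"type\": \"default\", "
                + " \"orderBy\": [\"alias\", {\"dimension\": \"idx\", \"direction\": \"descending\"}], "
                + " \"limit\": 5}";
    LimitSpec spec = objectMapper.readValue(json, LimitSpec.class);      // -> DefaultLimitSpec
    LimitSpec noop = objectMapper.readValue("{}", LimitSpec.class);      // no "type" -> NoopLimitSpec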
@@ -0,0 +1,24 @@
package com.metamx.druid.query.group.limit;

import com.google.common.base.Function;
import com.google.common.base.Functions;
import com.metamx.common.guava.Sequence;
import com.metamx.druid.aggregation.AggregatorFactory;
import com.metamx.druid.aggregation.post.PostAggregator;
import com.metamx.druid.input.Row;
import com.metamx.druid.query.dimension.DimensionSpec;

import java.util.List;

/**
 */
public class NoopLimitSpec implements LimitSpec
{
  @Override
  public Function<Sequence<Row>, Sequence<Row>> build(
      List<DimensionSpec> dimensions, List<AggregatorFactory> aggs, List<PostAggregator> postAggs
  )
  {
    return Functions.identity();
  }
}
@@ -0,0 +1,120 @@
package com.metamx.druid.query.group.limit;

import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableMap;
import com.metamx.common.IAE;
import com.metamx.common.ISE;

import java.util.Map;

/**
 */
public class OrderByColumnSpec
{
  /**
   * Maintain a map of the enum values so that we can just do a lookup and get a null if it doesn't exist instead
   * of an exception thrown.
   */
  private static final Map<String, Direction> stupidEnumMap;

  static
  {
    final ImmutableMap.Builder<String, Direction> bob = ImmutableMap.builder();
    for (Direction direction : Direction.values()) {
      bob.put(direction.toString(), direction);
    }
    stupidEnumMap = bob.build();
  }

  private final String dimension;
  private final Direction direction;

  @JsonCreator
  public static OrderByColumnSpec create(Object obj)
  {
    Preconditions.checkNotNull(obj, "Cannot build an OrderByColumnSpec from a null object.");

    if (obj instanceof String) {
      return new OrderByColumnSpec(obj.toString(), null);
    }
    else if (obj instanceof Map) {
      final Map map = (Map) obj;

      final String dimension = map.get("dimension").toString();
      final Direction direction = determineDirection(map.get("direction"));

      return new OrderByColumnSpec(dimension, direction);
    }
    else {
      throw new ISE("Cannot build an OrderByColumnSpec from a %s", obj.getClass());
    }
  }

  public OrderByColumnSpec(
      String dimension,
      Direction direction
  )
  {
    this.dimension = dimension;
    this.direction = direction == null ? Direction.ASCENDING : direction;
  }

  @JsonProperty
  public String getDimension()
  {
    return dimension;
  }

  @JsonProperty
  public Direction getDirection()
  {
    return direction;
  }

  public static Direction determineDirection(Object directionObj)
  {
    if (directionObj == null) {
      return null;
    }

    String directionString = directionObj.toString();

    Direction direction = stupidEnumMap.get(directionString);

    if (direction == null) {
      final String lowerDimension = directionString.toLowerCase();

      for (Direction dir : Direction.values()) {
        if (dir.toString().toLowerCase().startsWith(lowerDimension)) {
          if (direction != null) {
            throw new ISE("Ambiguous directions[%s] and [%s]", direction, dir);
          }
          direction = dir;
        }
      }
    }

    if (direction == null) {
      throw new IAE("Unknown direction[%s]", directionString);
    }

    return direction;
  }

  @Override
  public String toString()
  {
    return "OrderByColumnSpec{" +
           "dimension='" + dimension + '\'' +
           ", direction=" + direction +
           '}';
  }

  public static enum Direction
  {
    ASCENDING,
    DESCENDING
  }
}
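Note: determineDirection accepts the exact enum name or any unambiguous case-insensitive prefix, which is what lets the tests in this commit order by "desc" or just "d"; an ambiguous prefix (for example the empty string, which prefixes both values) raises ISE, and a non-matching one raises IAE. For example:

    OrderByColumnSpec.Direction full = OrderByColumnSpec.determineDirection("DESCENDING");  // exact match
    OrderByColumnSpec.Direction d    = OrderByColumnSpec.determineDirection("d");           // unique prefix -> DESCENDING
    OrderByColumnSpec.Direction a    = OrderByColumnSpec.determineDirection("asc");         // unique prefix -> ASCENDING
    // determineDirection("x")  -> IAE("Unknown direction[x]")
    // determineDirection("")   -> ISE("Ambiguous directions[ASCENDING] and [DESCENDING]")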
@@ -28,7 +28,7 @@
     <parent>
         <groupId>com.metamx</groupId>
         <artifactId>druid</artifactId>
-        <version>0.4.1-SNAPSHOT</version>
+        <version>0.4.4-SNAPSHOT</version>
     </parent>
 
     <dependencies>
@@ -42,7 +42,7 @@ public abstract class DbConnectorConfig
   @Config("druid.database.segmentTable")
   public abstract String getSegmentTable();
 
-  @JsonProperty("validationQuery")
+  @JsonProperty("useValidationQuery")
   @Config("druid.database.validation")
   public boolean isValidationQuery() {
     return false;
@@ -9,7 +9,7 @@
     <parent>
         <groupId>com.metamx</groupId>
         <artifactId>druid</artifactId>
-        <version>0.4.1-SNAPSHOT</version>
+        <version>0.4.4-SNAPSHOT</version>
     </parent>
 
     <dependencies>
@@ -28,7 +28,7 @@
     <parent>
         <groupId>com.metamx</groupId>
         <artifactId>druid</artifactId>
-        <version>0.4.1-SNAPSHOT</version>
+        <version>0.4.4-SNAPSHOT</version>
     </parent>
 
     <dependencies>
@@ -28,7 +28,7 @@
     <parent>
         <groupId>com.metamx</groupId>
         <artifactId>druid</artifactId>
-        <version>0.4.1-SNAPSHOT</version>
+        <version>0.4.4-SNAPSHOT</version>
     </parent>
 
     <dependencies>
@@ -28,7 +28,7 @@
     <parent>
         <groupId>com.metamx</groupId>
         <artifactId>druid</artifactId>
-        <version>0.4.1-SNAPSHOT</version>
+        <version>0.4.4-SNAPSHOT</version>
     </parent>
 
     <dependencies>
pom.xml
@@ -23,7 +23,7 @@
     <groupId>com.metamx</groupId>
     <artifactId>druid</artifactId>
     <packaging>pom</packaging>
-    <version>0.4.1-SNAPSHOT</version>
+    <version>0.4.4-SNAPSHOT</version>
     <name>druid</name>
     <description>druid</description>
     <scm>
@@ -38,7 +38,7 @@
 
     <properties>
         <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
-        <metamx.java-util.version>0.22.0</metamx.java-util.version>
+        <metamx.java-util.version>0.22.3</metamx.java-util.version>
         <netflix.curator.version>2.0.1-21-22</netflix.curator.version>
     </properties>
 
@@ -28,7 +28,7 @@
     <parent>
         <groupId>com.metamx</groupId>
         <artifactId>druid</artifactId>
-        <version>0.4.1-SNAPSHOT</version>
+        <version>0.4.4-SNAPSHOT</version>
     </parent>
 
     <dependencies>
@@ -28,7 +28,7 @@
     <parent>
         <groupId>com.metamx</groupId>
         <artifactId>druid</artifactId>
-        <version>0.4.1-SNAPSHOT</version>
+        <version>0.4.4-SNAPSHOT</version>
     </parent>
 
     <dependencies>
@@ -22,7 +22,6 @@ package com.metamx.druid.query.group;
 import com.google.common.base.Function;
 import com.google.common.base.Preconditions;
-import com.google.common.base.Supplier;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Iterables;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
@@ -41,6 +40,7 @@ import com.metamx.druid.query.QueryRunner;
 import com.metamx.druid.query.QueryRunnerTestHelper;
 import com.metamx.druid.query.dimension.DefaultDimensionSpec;
 import com.metamx.druid.query.dimension.DimensionSpec;
+import com.metamx.druid.query.group.limit.OrderByColumnSpec;
 import com.metamx.druid.query.segment.MultipleIntervalSegmentSpec;
 import org.joda.time.DateTime;
 import org.joda.time.DateTimeZone;
@@ -126,7 +126,7 @@ public class GroupByQueryRunnerTest
         .setGranularity(QueryRunnerTestHelper.dayGran)
         .build();
 
-    List<Row> expectedResults = Arrays.<Row>asList(
+    List<Row> expectedResults = Arrays.asList(
         createExpectedRow("2011-04-01", "alias", "automotive", "rows", 1L, "idx", 135L),
         createExpectedRow("2011-04-01", "alias", "business", "rows", 1L, "idx", 118L),
         createExpectedRow("2011-04-01", "alias", "entertainment", "rows", 1L, "idx", 158L),
@@ -187,25 +187,25 @@ public class GroupByQueryRunnerTest
         .build();
 
     List<Row> expectedResults = Arrays.asList(
-        (Row) new MapBasedRow(new DateTime("2011-03-31", tz),ImmutableMap.<String, Object>of("alias", "automotive", "rows", 1L, "idx", 135L)),
-        (Row) new MapBasedRow(new DateTime("2011-03-31", tz),ImmutableMap.<String, Object>of("alias", "business", "rows", 1L, "idx", 118L)),
-        (Row) new MapBasedRow(new DateTime("2011-03-31", tz),ImmutableMap.<String, Object>of("alias", "entertainment", "rows", 1L, "idx", 158L)),
-        (Row) new MapBasedRow(new DateTime("2011-03-31", tz),ImmutableMap.<String, Object>of("alias", "health", "rows", 1L, "idx", 120L)),
-        (Row) new MapBasedRow(new DateTime("2011-03-31", tz),ImmutableMap.<String, Object>of("alias", "mezzanine", "rows", 3L, "idx", 2870L)),
-        (Row) new MapBasedRow(new DateTime("2011-03-31", tz),ImmutableMap.<String, Object>of("alias", "news", "rows", 1L, "idx", 121L)),
-        (Row) new MapBasedRow(new DateTime("2011-03-31", tz),ImmutableMap.<String, Object>of("alias", "premium", "rows", 3L, "idx", 2900L)),
-        (Row) new MapBasedRow(new DateTime("2011-03-31", tz),ImmutableMap.<String, Object>of("alias", "technology", "rows", 1L, "idx", 78L)),
-        (Row) new MapBasedRow(new DateTime("2011-03-31", tz),ImmutableMap.<String, Object>of("alias", "travel", "rows", 1L, "idx", 119L)),
+        createExpectedRow(new DateTime("2011-03-31", tz), "alias", "automotive", "rows", 1L, "idx", 135L),
+        createExpectedRow(new DateTime("2011-03-31", tz), "alias", "business", "rows", 1L, "idx", 118L),
+        createExpectedRow(new DateTime("2011-03-31", tz), "alias", "entertainment", "rows", 1L, "idx", 158L),
+        createExpectedRow(new DateTime("2011-03-31", tz), "alias", "health", "rows", 1L, "idx", 120L),
+        createExpectedRow(new DateTime("2011-03-31", tz), "alias", "mezzanine", "rows", 3L, "idx", 2870L),
+        createExpectedRow(new DateTime("2011-03-31", tz), "alias", "news", "rows", 1L, "idx", 121L),
+        createExpectedRow(new DateTime("2011-03-31", tz), "alias", "premium", "rows", 3L, "idx", 2900L),
+        createExpectedRow(new DateTime("2011-03-31", tz), "alias", "technology", "rows", 1L, "idx", 78L),
+        createExpectedRow(new DateTime("2011-03-31", tz), "alias", "travel", "rows", 1L, "idx", 119L),
 
-        (Row) new MapBasedRow(new DateTime("2011-04-01", tz),ImmutableMap.<String, Object>of("alias", "automotive", "rows", 1L, "idx", 147L)),
-        (Row) new MapBasedRow(new DateTime("2011-04-01", tz),ImmutableMap.<String, Object>of("alias", "business", "rows", 1L, "idx", 112L)),
-        (Row) new MapBasedRow(new DateTime("2011-04-01", tz),ImmutableMap.<String, Object>of("alias", "entertainment", "rows", 1L, "idx", 166L)),
-        (Row) new MapBasedRow(new DateTime("2011-04-01", tz),ImmutableMap.<String, Object>of("alias", "health", "rows", 1L, "idx", 113L)),
-        (Row) new MapBasedRow(new DateTime("2011-04-01", tz),ImmutableMap.<String, Object>of("alias", "mezzanine", "rows", 3L, "idx", 2447L)),
-        (Row) new MapBasedRow(new DateTime("2011-04-01", tz),ImmutableMap.<String, Object>of("alias", "news", "rows", 1L, "idx", 114L)),
-        (Row) new MapBasedRow(new DateTime("2011-04-01", tz),ImmutableMap.<String, Object>of("alias", "premium", "rows", 3L, "idx", 2505L)),
-        (Row) new MapBasedRow(new DateTime("2011-04-01", tz),ImmutableMap.<String, Object>of("alias", "technology", "rows", 1L, "idx", 97L)),
-        (Row) new MapBasedRow(new DateTime("2011-04-01", tz),ImmutableMap.<String, Object>of("alias", "travel", "rows", 1L, "idx", 126L))
+        createExpectedRow(new DateTime("2011-04-01", tz), "alias", "automotive", "rows", 1L, "idx", 147L),
+        createExpectedRow(new DateTime("2011-04-01", tz), "alias", "business", "rows", 1L, "idx", 112L),
+        createExpectedRow(new DateTime("2011-04-01", tz), "alias", "entertainment", "rows", 1L, "idx", 166L),
+        createExpectedRow(new DateTime("2011-04-01", tz), "alias", "health", "rows", 1L, "idx", 113L),
+        createExpectedRow(new DateTime("2011-04-01", tz), "alias", "mezzanine", "rows", 3L, "idx", 2447L),
+        createExpectedRow(new DateTime("2011-04-01", tz), "alias", "news", "rows", 1L, "idx", 114L),
+        createExpectedRow(new DateTime("2011-04-01", tz), "alias", "premium", "rows", 3L, "idx", 2505L),
+        createExpectedRow(new DateTime("2011-04-01", tz), "alias", "technology", "rows", 1L, "idx", 97L),
+        createExpectedRow(new DateTime("2011-04-01", tz), "alias", "travel", "rows", 1L, "idx", 126L)
     );
 
     Iterable<Row> results = Sequences.toList(
@@ -252,7 +252,7 @@ public class GroupByQueryRunnerTest
       }
     );
 
-    List<Row> expectedResults = Arrays.<Row>asList(
+    List<Row> expectedResults = Arrays.asList(
         createExpectedRow("2011-04-01", "alias", "automotive", "rows", 2L, "idx", 269L),
         createExpectedRow("2011-04-01", "alias", "business", "rows", 2L, "idx", 217L),
         createExpectedRow("2011-04-01", "alias", "entertainment", "rows", 2L, "idx", 319L),
@@ -267,7 +267,7 @@ public class GroupByQueryRunnerTest
     TestHelper.assertExpectedObjects(expectedResults, runner.run(fullQuery), "direct");
     TestHelper.assertExpectedObjects(expectedResults, mergedRunner.run(fullQuery), "merged");
 
-    List<Row> allGranExpectedResults = Arrays.<Row>asList(
+    List<Row> allGranExpectedResults = Arrays.asList(
         createExpectedRow("2011-04-02", "alias", "automotive", "rows", 2L, "idx", 269L),
         createExpectedRow("2011-04-02", "alias", "business", "rows", 2L, "idx", 217L),
         createExpectedRow("2011-04-02", "alias", "entertainment", "rows", 2L, "idx", 319L),
@@ -284,7 +284,92 @@ public class GroupByQueryRunnerTest
 
   }
 
-  private MapBasedRow createExpectedRow(final String timestamp, Object... vals)
+  @Test
+  public void testGroupByOrderLimit() throws Exception
+  {
+    GroupByQuery.Builder builder = GroupByQuery
+        .builder()
+        .setDataSource(QueryRunnerTestHelper.dataSource)
+        .setInterval("2011-04-02/2011-04-04")
+        .setDimensions(Lists.<DimensionSpec>newArrayList(new DefaultDimensionSpec("quality", "alias")))
+        .setAggregatorSpecs(
+            Arrays.<AggregatorFactory>asList(
+                QueryRunnerTestHelper.rowsCount,
+                new LongSumAggregatorFactory("idx", "index")
+            )
+        )
+        .addOrderByColumn("rows")
+        .addOrderByColumn("alias", OrderByColumnSpec.Direction.DESCENDING)
+        .setGranularity(new PeriodGranularity(new Period("P1M"), null, null));
+
+    final GroupByQuery query = builder.build();
+
+    List<Row> expectedResults = Arrays.asList(
+        createExpectedRow("2011-04-01", "alias", "travel", "rows", 2L, "idx", 243L),
+        createExpectedRow("2011-04-01", "alias", "technology", "rows", 2L, "idx", 177L),
+        createExpectedRow("2011-04-01", "alias", "news", "rows", 2L, "idx", 221L),
+        createExpectedRow("2011-04-01", "alias", "health", "rows", 2L, "idx", 216L),
+        createExpectedRow("2011-04-01", "alias", "entertainment", "rows", 2L, "idx", 319L),
+        createExpectedRow("2011-04-01", "alias", "business", "rows", 2L, "idx", 217L),
+        createExpectedRow("2011-04-01", "alias", "automotive", "rows", 2L, "idx", 269L),
+        createExpectedRow("2011-04-01", "alias", "premium", "rows", 6L, "idx", 4416L),
+        createExpectedRow("2011-04-01", "alias", "mezzanine", "rows", 6L, "idx", 4420L)
+    );
+
+    QueryRunner<Row> mergeRunner = new GroupByQueryQueryToolChest().mergeResults(runner);
+    TestHelper.assertExpectedObjects(expectedResults, mergeRunner.run(query), "no-limit");
+
+    TestHelper.assertExpectedObjects(
+        Iterables.limit(expectedResults, 5), mergeRunner.run(builder.limit(5).build()), "limited"
+    );
+  }
+
+  @Test
+  public void testGroupByWithOrderLimit2() throws Exception
+  {
+    GroupByQuery.Builder builder = GroupByQuery
+        .builder()
+        .setDataSource(QueryRunnerTestHelper.dataSource)
+        .setInterval("2011-04-02/2011-04-04")
+        .setDimensions(Lists.<DimensionSpec>newArrayList(new DefaultDimensionSpec("quality", "alias")))
+        .setAggregatorSpecs(
+            Arrays.<AggregatorFactory>asList(
+                QueryRunnerTestHelper.rowsCount,
+                new LongSumAggregatorFactory("idx", "index")
+            )
+        )
+        .addOrderByColumn("rows", "desc")
+        .addOrderByColumn("alias", "d")
+        .setGranularity(new PeriodGranularity(new Period("P1M"), null, null));
+
+    final GroupByQuery query = builder.build();
+
+    List<Row> expectedResults = Arrays.asList(
+        createExpectedRow("2011-04-01", "alias", "premium", "rows", 6L, "idx", 4416L),
+        createExpectedRow("2011-04-01", "alias", "mezzanine", "rows", 6L, "idx", 4420L),
+        createExpectedRow("2011-04-01", "alias", "travel", "rows", 2L, "idx", 243L),
+        createExpectedRow("2011-04-01", "alias", "technology", "rows", 2L, "idx", 177L),
+        createExpectedRow("2011-04-01", "alias", "news", "rows", 2L, "idx", 221L),
+        createExpectedRow("2011-04-01", "alias", "health", "rows", 2L, "idx", 216L),
+        createExpectedRow("2011-04-01", "alias", "entertainment", "rows", 2L, "idx", 319L),
+        createExpectedRow("2011-04-01", "alias", "business", "rows", 2L, "idx", 217L),
+        createExpectedRow("2011-04-01", "alias", "automotive", "rows", 2L, "idx", 269L)
+    );
+
+    QueryRunner<Row> mergeRunner = new GroupByQueryQueryToolChest().mergeResults(runner);
+    TestHelper.assertExpectedObjects(expectedResults, mergeRunner.run(query), "no-limit");
+    TestHelper.assertExpectedObjects(
+        Iterables.limit(expectedResults, 5), mergeRunner.run(builder.limit(5).build()), "limited"
+    );
+
+  }
+
+  private Row createExpectedRow(final String timestamp, Object... vals)
+  {
+    return createExpectedRow(new DateTime(timestamp), vals);
+  }
+
+  private Row createExpectedRow(final DateTime timestamp, Object... vals)
   {
     Preconditions.checkArgument(vals.length % 2 == 0);
 
@@ -293,6 +378,6 @@ public class GroupByQueryRunnerTest
       theVals.put(vals[i].toString(), vals[i+1]);
     }
 
-    return new MapBasedRow(new DateTime(timestamp), theVals);
+    return new MapBasedRow(timestamp, theVals);
   }
 }
@@ -34,7 +34,6 @@ import com.metamx.druid.query.timeseries.TimeseriesQuery;
 import com.metamx.druid.query.timeseries.TimeseriesQueryRunnerTest;
 import com.metamx.druid.result.Result;
 import com.metamx.druid.result.TimeseriesResultValue;
-import org.joda.time.DateTime;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
 
@@ -48,6 +47,7 @@ import java.util.Collection;
 @RunWith(Parameterized.class)
 public class GroupByTimeseriesQueryRunnerTest extends TimeseriesQueryRunnerTest
 {
+  @SuppressWarnings("unchecked")
   @Parameterized.Parameters
   public static Collection<?> constructorFeeder() throws IOException
   {
@@ -24,11 +24,11 @@
     <artifactId>druid-services</artifactId>
     <name>druid-services</name>
     <description>druid-services</description>
-    <version>0.4.1-SNAPSHOT</version>
+    <version>0.4.4-SNAPSHOT</version>
     <parent>
         <groupId>com.metamx</groupId>
         <artifactId>druid</artifactId>
-        <version>0.4.1-SNAPSHOT</version>
+        <version>0.4.4-SNAPSHOT</version>
     </parent>
 
     <dependencies>