mirror of https://github.com/apache/druid.git

Commit 5601c51ae6: finalize variables and optimize imports
Parent: be1ef3a161
@@ -19,7 +19,6 @@
package io.druid.query;

import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
-import com.metamx.common.guava.Sequence;
@@ -23,25 +23,14 @@ package io.druid.query;

+import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
-import com.fasterxml.jackson.annotation.JsonSubTypes;
-import com.fasterxml.jackson.annotation.JsonTypeInfo;
-import io.druid.query.groupby.GroupByQuery;
-import io.druid.query.metadata.metadata.SegmentMetadataQuery;
-import io.druid.query.search.search.SearchQuery;
-import io.druid.query.timeboundary.TimeBoundaryQuery;
-import io.druid.query.timeseries.TimeseriesQuery;
-import io.druid.query.topn.TopNQuery;

public class QueryDataSource implements DataSource
{
  @JsonProperty
-  private Query query;
+  private final Query query;

-  public QueryDataSource()
-  {
-  }
-
-  public QueryDataSource(Query query)
+  @JsonCreator
+  public QueryDataSource(@JsonProperty("query") Query query)
  {
    this.query = query;
  }

@@ -51,11 +40,6 @@ public class QueryDataSource implements DataSource
    return query;
  }

-  public void setQuery(Query query)
-  {
-    this.query = query;
-  }
-
  public String toString() { return query.toString(); }

  @Override
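The QueryDataSource change above is the standard Jackson recipe for making a bean immutable: drop the no-arg constructor and setter, mark the remaining constructor with @JsonCreator, and bind each parameter with @JsonProperty so deserialization goes through the constructor. A minimal self-contained sketch of the pattern (hypothetical class, not part of the Druid source):

import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.ObjectMapper;

public class ImmutableBeanSketch
{
  @JsonProperty
  private final String query;

  // Jackson invokes this constructor when deserializing, so no no-arg
  // constructor or setter is needed and the field can stay final.
  @JsonCreator
  public ImmutableBeanSketch(@JsonProperty("query") String query)
  {
    this.query = query;
  }

  public String getQuery()
  {
    return query;
  }

  public static void main(String[] args) throws Exception
  {
    ObjectMapper mapper = new ObjectMapper();
    ImmutableBeanSketch bean = mapper.readValue("{\"query\":\"select\"}", ImmutableBeanSketch.class);
    System.out.println(bean.getQuery()); // select
  }
}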
@@ -22,7 +22,6 @@
package io.druid.query;

import com.metamx.common.guava.Sequence;
-import org.joda.time.Period;

/**
 * If there's a subquery, run it instead of the outer query

@@ -41,9 +40,8 @@ public class SubqueryQueryRunner<T> implements QueryRunner<T>
  {
    DataSource dataSource = query.getDataSource();
    if (dataSource instanceof QueryDataSource) {
-      return run((Query<T>) ((QueryDataSource)dataSource).getQuery());
-    }
-    else {
+      return run((Query<T>) ((QueryDataSource) dataSource).getQuery());
+    } else {
      return baseRunner.run(query);
    }
  }
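SubqueryQueryRunner's run() above recurses whenever the data source is itself a query, so arbitrarily nested subqueries eventually bottom out at the base runner. A rough stand-alone sketch of that shape, using hypothetical stand-in types rather than Druid's real interfaces:

// Hypothetical stand-ins, just to show the recursion in SubqueryQueryRunner.run().
interface DataSource {}

class TableDataSource implements DataSource
{
  final String name;
  TableDataSource(String name) { this.name = name; }
}

class QueryDataSource implements DataSource
{
  final Query query;
  QueryDataSource(Query query) { this.query = query; }
}

class Query
{
  final DataSource dataSource;
  Query(DataSource dataSource) { this.dataSource = dataSource; }
}

public class SubqueryUnwrapSketch
{
  // If the query wraps another query, recurse into it; otherwise hand the
  // query to the base runner (here: just return it).
  static Query run(Query query)
  {
    DataSource ds = query.dataSource;
    if (ds instanceof QueryDataSource) {
      return run(((QueryDataSource) ds).query);
    } else {
      return query;
    }
  }

  public static void main(String[] args)
  {
    Query inner = new Query(new TableDataSource("wikipedia"));
    Query outer = new Query(new QueryDataSource(inner));
    System.out.println(run(outer) == inner); // true
  }
}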
@@ -26,18 +26,12 @@ import com.fasterxml.jackson.annotation.JsonProperty;
public class TableDataSource implements DataSource
{
  @JsonProperty
-  private String name;
+  private final String name;

-  public TableDataSource()
-  {
-
-  }
-
  @JsonCreator
-  public TableDataSource(String name)
+  public TableDataSource(@JsonProperty("name") String name)
  {
-    this.name = name==null? name : name.toLowerCase();
+    this.name = name.toLowerCase();
  }

  public String getName()

@@ -45,11 +39,6 @@ public class TableDataSource implements DataSource
    return name;
  }

-  public void setName(String name)
-  {
-    this.name = name;
-  }
-
  public String toString() { return name; }

  @Override
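One behavioral reading of the TableDataSource hunk (inferred from the diff, not stated in the commit message): the old constructor tolerated a null name, while the new one dereferences it immediately, so a null now fails fast. A tiny sketch mirroring the new constructor:

public class TableNameSketch
{
  private final String name;

  public TableNameSketch(String name)
  {
    // The old code kept a null name (name == null ? name : name.toLowerCase());
    // the new code assumes non-null and normalizes unconditionally.
    this.name = name.toLowerCase();
  }

  public String getName()
  {
    return name;
  }

  public static void main(String[] args)
  {
    System.out.println(new TableNameSketch("Wikipedia").getName()); // wikipedia
    // new TableNameSketch(null) would now throw NullPointerException.
  }
}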
@@ -32,7 +32,12 @@ import com.metamx.common.guava.Sequence;
import com.metamx.common.guava.Sequences;
import io.druid.data.input.Row;
import io.druid.granularity.QueryGranularity;
-import io.druid.query.*;
+import io.druid.query.BaseQuery;
+import io.druid.query.DataSource;
+import io.druid.query.Queries;
+import io.druid.query.Query;
+import io.druid.query.QueryDataSource;
+import io.druid.query.TableDataSource;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.PostAggregator;
import io.druid.query.dimension.DefaultDimensionSpec;
@@ -24,9 +24,6 @@ import com.google.common.collect.Lists;
import com.metamx.common.ISE;
import com.metamx.common.Pair;
import com.metamx.common.guava.Accumulator;
import com.metamx.common.guava.Sequence;
import com.metamx.common.guava.Sequences;
import io.druid.data.input.MapBasedRow;
import io.druid.data.input.Row;
import io.druid.data.input.Rows;
import io.druid.granularity.QueryGranularity;
@@ -34,7 +34,13 @@ import com.metamx.common.guava.Sequences;
import com.metamx.emitter.service.ServiceMetricEvent;
import io.druid.data.input.MapBasedRow;
import io.druid.data.input.Row;
-import io.druid.query.*;
+import io.druid.query.DataSource;
+import io.druid.query.IntervalChunkingQueryRunner;
+import io.druid.query.Query;
+import io.druid.query.QueryDataSource;
+import io.druid.query.QueryRunner;
+import io.druid.query.QueryToolChest;
+import io.druid.query.SubqueryQueryRunner;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.MetricManipulationFn;
import io.druid.segment.incremental.IncrementalIndex;

@@ -93,7 +99,7 @@ public class GroupByQueryQueryToolChest extends QueryToolChest<Row, GroupByQuery>
    if (dataSource instanceof QueryDataSource) {
      GroupByQuery subquery;
      try {
-        subquery = (GroupByQuery) ((QueryDataSource)dataSource).getQuery();
+        subquery = (GroupByQuery) ((QueryDataSource) dataSource).getQuery();
      } catch (ClassCastException e) {
        throw new UnsupportedOperationException("Subqueries must be of type 'group by'");
      }

@@ -101,8 +107,7 @@ public class GroupByQueryQueryToolChest extends QueryToolChest<Row, GroupByQuery>
      IncrementalIndexStorageAdapter adapter
          = new IncrementalIndexStorageAdapter(makeIncrementalIndex(subquery, subqueryResult));
      result = engine.process(query, adapter);
-    }
-    else {
+    } else {
      result = runner.run(query);
    }
@@ -34,7 +34,11 @@ import com.metamx.common.guava.nary.BinaryFn;
import com.metamx.emitter.service.ServiceMetricEvent;
import io.druid.collections.OrderedMergeSequence;
import io.druid.common.utils.JodaUtils;
-import io.druid.query.*;
+import io.druid.query.CacheStrategy;
+import io.druid.query.Query;
+import io.druid.query.QueryRunner;
+import io.druid.query.QueryToolChest;
+import io.druid.query.ResultMergeQueryRunner;
import io.druid.query.aggregation.MetricManipulationFn;
import io.druid.query.metadata.metadata.ColumnAnalysis;
import io.druid.query.metadata.metadata.SegmentAnalysis;

@@ -175,9 +179,9 @@ public class SegmentMetadataQueryQueryToolChest extends QueryToolChest<SegmentAnalysis, SegmentMetadataQuery> (indentation change only)
  {
    byte[] includerBytes = query.getToInclude().getCacheKey();
    return ByteBuffer.allocate(1 + includerBytes.length)
                     .put(SEGMENT_METADATA_CACHE_PREFIX)
                     .put(includerBytes)
                     .array();
  }

  @Override
@@ -37,7 +37,14 @@ import com.metamx.common.guava.Sequences;
import com.metamx.common.guava.nary.BinaryFn;
import com.metamx.emitter.service.ServiceMetricEvent;
import io.druid.collections.OrderedMergeSequence;
-import io.druid.query.*;
+import io.druid.query.CacheStrategy;
+import io.druid.query.IntervalChunkingQueryRunner;
+import io.druid.query.Query;
+import io.druid.query.QueryRunner;
+import io.druid.query.QueryToolChest;
+import io.druid.query.Result;
+import io.druid.query.ResultGranularTimestampComparator;
+import io.druid.query.ResultMergeQueryRunner;
import io.druid.query.aggregation.MetricManipulationFn;
import io.druid.query.filter.DimFilter;
import io.druid.query.search.search.SearchHit;

@@ -166,7 +173,7 @@ public class SearchQueryQueryToolChest extends QueryToolChest<Result<SearchResultValue>, SearchQuery> (indentation change only)
    final ByteBuffer queryCacheKey = ByteBuffer
        .allocate(
            1 + 4 + granularityBytes.length + filterBytes.length +
            querySpecBytes.length + dimensionsBytesSize
        )
        .put(SEARCH_QUERY)
        .put(Ints.toByteArray(query.getLimit()))
@@ -25,7 +25,10 @@ import com.google.common.base.Function;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
import io.druid.granularity.QueryGranularity;
-import io.druid.query.*;
+import io.druid.query.BaseQuery;
+import io.druid.query.DataSource;
+import io.druid.query.Query;
+import io.druid.query.Result;
import io.druid.query.filter.DimFilter;
import io.druid.query.search.SearchResultValue;
import io.druid.query.spec.QuerySegmentSpec;

@@ -179,14 +182,14 @@ public class SearchQuery extends BaseQuery<Result<SearchResultValue>> (indentation change only)
  public String toString()
  {
    return "SearchQuery{" +
           "dataSource='" + getDataSource() + '\'' +
           ", dimFilter=" + dimFilter +
           ", granularity='" + granularity + '\'' +
           ", dimensions=" + dimensions +
           ", querySpec=" + querySpec +
           ", querySegmentSpec=" + getQuerySegmentSpec() +
           ", limit=" + limit +
           '}';
  }

  @Override
@@ -24,7 +24,10 @@ import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
-import io.druid.query.*;
+import io.druid.query.BaseQuery;
+import io.druid.query.DataSource;
+import io.druid.query.Query;
+import io.druid.query.Result;
import io.druid.query.spec.MultipleIntervalSegmentSpec;
import io.druid.query.spec.QuerySegmentSpec;
import org.joda.time.DateTime;
@@ -31,7 +31,12 @@ import com.metamx.common.guava.Sequence;
import com.metamx.common.guava.Sequences;
import com.metamx.emitter.service.ServiceMetricEvent;
import io.druid.collections.OrderedMergeSequence;
-import io.druid.query.*;
+import io.druid.query.BySegmentSkippingQueryRunner;
+import io.druid.query.CacheStrategy;
+import io.druid.query.Query;
+import io.druid.query.QueryRunner;
+import io.druid.query.QueryToolChest;
+import io.druid.query.Result;
import io.druid.query.aggregation.MetricManipulationFn;
import io.druid.timeline.LogicalSegment;
import org.joda.time.DateTime;

@@ -73,7 +78,7 @@ public class TimeBoundaryQueryQueryToolChest (indentation change only)
        public boolean apply(T input)
        {
          return input.getInterval().overlaps(first.getInterval()) || input.getInterval()
              .overlaps(second.getInterval());
        }
      }
  )

@@ -141,9 +146,9 @@ public class TimeBoundaryQueryQueryToolChest (indentation change only)
  public byte[] computeCacheKey(TimeBoundaryQuery query)
  {
    return ByteBuffer.allocate(2)
                     .put(TIMEBOUNDARY_QUERY)
                     .put(query.getCacheKey())
                     .array();
  }

  @Override
@@ -24,7 +24,11 @@ import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonTypeName;
import com.google.common.collect.ImmutableList;
import io.druid.granularity.QueryGranularity;
-import io.druid.query.*;
+import io.druid.query.BaseQuery;
+import io.druid.query.DataSource;
+import io.druid.query.Queries;
+import io.druid.query.Query;
+import io.druid.query.Result;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.PostAggregator;
import io.druid.query.filter.DimFilter;

@@ -129,14 +133,14 @@ public class TimeseriesQuery extends BaseQuery<Result<TimeseriesResultValue>> (indentation change only)
  public String toString()
  {
    return "TimeseriesQuery{" +
           "dataSource='" + getDataSource() + '\'' +
           ", querySegmentSpec=" + getQuerySegmentSpec() +
           ", dimFilter=" + dimFilter +
           ", granularity='" + granularity + '\'' +
           ", aggregatorSpecs=" + aggregatorSpecs +
           ", postAggregatorSpecs=" + postAggregatorSpecs +
           ", context=" + getContext() +
           '}';
  }

  @Override
@@ -22,8 +22,6 @@ package io.druid.query.timeseries;

import com.google.common.base.Function;
import com.metamx.common.guava.BaseSequence;
import com.metamx.common.guava.Sequence;
-import io.druid.query.DataSource;
-import io.druid.query.Query;
import io.druid.query.QueryRunnerHelper;
import io.druid.query.Result;
import io.druid.query.aggregation.Aggregator;
@@ -32,7 +32,16 @@ import com.metamx.common.guava.nary.BinaryFn;
import com.metamx.emitter.service.ServiceMetricEvent;
import io.druid.collections.OrderedMergeSequence;
import io.druid.granularity.QueryGranularity;
-import io.druid.query.*;
+import io.druid.query.CacheStrategy;
+import io.druid.query.IntervalChunkingQueryRunner;
+import io.druid.query.Query;
+import io.druid.query.QueryCacheHelper;
+import io.druid.query.QueryConfig;
+import io.druid.query.QueryRunner;
+import io.druid.query.QueryToolChest;
+import io.druid.query.Result;
+import io.druid.query.ResultGranularTimestampComparator;
+import io.druid.query.ResultMergeQueryRunner;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.MetricManipulationFn;
import io.druid.query.aggregation.PostAggregator;
@@ -22,7 +22,13 @@ package io.druid.query.timeseries;

import com.google.inject.Inject;
import com.metamx.common.ISE;
import com.metamx.common.guava.Sequence;
-import io.druid.query.*;
+import io.druid.query.ChainedExecutionQueryRunner;
+import io.druid.query.Query;
+import io.druid.query.QueryConfig;
+import io.druid.query.QueryRunner;
+import io.druid.query.QueryRunnerFactory;
+import io.druid.query.QueryToolChest;
+import io.druid.query.Result;
import io.druid.segment.Segment;
import io.druid.segment.StorageAdapter;
@@ -24,7 +24,10 @@ import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import io.druid.granularity.QueryGranularity;
-import io.druid.query.*;
+import io.druid.query.BaseQuery;
+import io.druid.query.DataSource;
+import io.druid.query.Queries;
+import io.druid.query.Result;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.PostAggregator;
import io.druid.query.dimension.DimensionSpec;
@@ -31,7 +31,11 @@ import io.druid.query.aggregation.post.ConstantPostAggregator;
import io.druid.query.aggregation.post.FieldAccessPostAggregator;
import io.druid.query.spec.MultipleIntervalSegmentSpec;
import io.druid.query.spec.QuerySegmentSpec;
-import io.druid.segment.*;
+import io.druid.segment.IncrementalIndexSegment;
+import io.druid.segment.QueryableIndex;
+import io.druid.segment.QueryableIndexSegment;
+import io.druid.segment.Segment;
+import io.druid.segment.TestIndex;
import io.druid.segment.incremental.IncrementalIndex;
import org.joda.time.DateTime;
import org.joda.time.Interval;
@@ -22,22 +22,14 @@
package io.druid.query.search;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.Lists;
import io.druid.jackson.DefaultObjectMapper;
import io.druid.query.Druids;
import io.druid.query.Query;
import io.druid.query.QueryRunnerTestHelper;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.LongSumAggregatorFactory;
import io.druid.query.dimension.DefaultDimensionSpec;
import io.druid.query.dimension.DimensionSpec;
import io.druid.query.groupby.GroupByQuery;
import io.druid.query.search.search.SearchQuery;
import org.junit.Assert;
import org.junit.Test;

import java.io.IOException;
import java.util.Arrays;

public class SearchQueryTest
{
@@ -25,7 +25,6 @@ import com.fasterxml.jackson.databind.ObjectMapper;
import io.druid.jackson.DefaultObjectMapper;
import io.druid.query.Druids;
import io.druid.query.Query;
-import io.druid.query.QueryRunnerTestHelper;
import org.junit.Assert;
import org.junit.Test;

@@ -39,8 +38,8 @@ public class TimeBoundaryQueryTest (indentation change only)
  public void testQuerySerialization() throws IOException
  {
    Query query = Druids.newTimeBoundaryQueryBuilder()
                        .dataSource("testing")
                        .build();

    String json = jsonMapper.writeValueAsString(query);
    Query serdeQuery = jsonMapper.readValue(json, Query.class);
@@ -54,8 +54,6 @@ import java.util.Collection;
import java.util.List;
import java.util.Map;

-import static io.druid.query.QueryRunnerTestHelper.*;
-
/**
 */
@RunWith(Parameterized.class)

@@ -135,32 +133,32 @@ public class TopNQueryRunnerTest (indentation change only)
        new TopNResultValue(
            Arrays.<Map<String, Object>>asList(
                ImmutableMap.<String, Object>builder()
                            .put(providerDimension, "total_market")
                            .put("rows", 186L)
                            .put("index", 215679.82879638672D)
                            .put("addRowsIndexConstant", 215866.82879638672D)
                            .put("uniques", QueryRunnerTestHelper.UNIQUES_2)
                            .put("maxIndex", 1743.9217529296875D)
                            .put("minIndex", 792.3260498046875D)
                            .build(),
                ImmutableMap.<String, Object>builder()
                            .put(providerDimension, "upfront")
                            .put("rows", 186L)
                            .put("index", 192046.1060180664D)
                            .put("addRowsIndexConstant", 192233.1060180664D)
                            .put("uniques", QueryRunnerTestHelper.UNIQUES_2)
                            .put("maxIndex", 1870.06103515625D)
                            .put("minIndex", 545.9906005859375D)
                            .build(),
                ImmutableMap.<String, Object>builder()
                            .put(providerDimension, "spot")
                            .put("rows", 837L)
                            .put("index", 95606.57232284546D)
                            .put("addRowsIndexConstant", 96444.57232284546D)
                            .put("uniques", QueryRunnerTestHelper.UNIQUES_9)
                            .put("maxIndex", 277.2735290527344D)
                            .put("minIndex", 59.02102279663086D)
                            .build()
            )
        )
    )

@@ -199,32 +197,32 @@ public class TopNQueryRunnerTest (indentation change only)
        new TopNResultValue(
            Arrays.<Map<String, Object>>asList(
                ImmutableMap.<String, Object>builder()
                            .put(providerDimension, "total_market")
                            .put("rows", 186L)
                            .put("index", 215679.82879638672D)
                            .put("addRowsIndexConstant", 215866.82879638672D)
                            .put("uniques", QueryRunnerTestHelper.UNIQUES_2)
                            .put("maxIndex", 1743.9217529296875D)
                            .put("minIndex", 792.3260498046875D)
                            .build(),
                ImmutableMap.<String, Object>builder()
                            .put(providerDimension, "upfront")
                            .put("rows", 186L)
                            .put("index", 192046.1060180664D)
                            .put("addRowsIndexConstant", 192233.1060180664D)
                            .put("uniques", QueryRunnerTestHelper.UNIQUES_2)
                            .put("maxIndex", 1870.06103515625D)
                            .put("minIndex", 545.9906005859375D)
                            .build(),
                ImmutableMap.<String, Object>builder()
                            .put(providerDimension, "spot")
                            .put("rows", 837L)
                            .put("index", 95606.57232284546D)
                            .put("addRowsIndexConstant", 96444.57232284546D)
                            .put("uniques", QueryRunnerTestHelper.UNIQUES_9)
                            .put("maxIndex", 277.2735290527344D)
                            .put("minIndex", 59.02102279663086D)
                            .build()
            )
        )
    )

@@ -264,32 +262,32 @@ public class TopNQueryRunnerTest (indentation change only)
        new TopNResultValue(
            Arrays.<Map<String, Object>>asList(
                ImmutableMap.<String, Object>builder()
                            .put("provider", "spot")
                            .put("rows", 837L)
                            .put("index", 95606.57232284546D)
                            .put("addRowsIndexConstant", 96444.57232284546D)
                            .put("uniques", QueryRunnerTestHelper.UNIQUES_9)
                            .put("maxIndex", 277.2735290527344D)
                            .put("minIndex", 59.02102279663086D)
                            .build(),
                ImmutableMap.<String, Object>builder()
                            .put("provider", "total_market")
                            .put("rows", 186L)
                            .put("index", 215679.82879638672D)
                            .put("addRowsIndexConstant", 215866.82879638672D)
                            .put("uniques", QueryRunnerTestHelper.UNIQUES_2)
                            .put("maxIndex", 1743.9217529296875D)
                            .put("minIndex", 792.3260498046875D)
                            .build(),
                ImmutableMap.<String, Object>builder()
                            .put("provider", "upfront")
                            .put("rows", 186L)
                            .put("index", 192046.1060180664D)
                            .put("addRowsIndexConstant", 192233.1060180664D)
                            .put("uniques", QueryRunnerTestHelper.UNIQUES_2)
                            .put("maxIndex", 1870.06103515625D)
                            .put("minIndex", 545.9906005859375D)
                            .build()
            )
        )
    )

@@ -697,18 +695,18 @@ public class TopNQueryRunnerTest (indentation change only)
  public void testTopNWithNonExistentFilterMultiDim()
  {
    AndDimFilter andDimFilter = Druids.newAndDimFilterBuilder()
                                      .fields(
                                          Lists.<DimFilter>newArrayList(
                                              Druids.newSelectorDimFilterBuilder()
                                                    .dimension(providerDimension)
                                                    .value("billyblank")
                                                    .build(),
                                              Druids.newSelectorDimFilterBuilder()
                                                    .dimension(QueryRunnerTestHelper.qualityDimension)
                                                    .value("mezzanine")
                                                    .build()
                                          )
                                      ).build();
    TopNQuery query = new TopNQueryBuilder()
        .dataSource(QueryRunnerTestHelper.dataSource)
        .granularity(QueryRunnerTestHelper.allGran)
@@ -36,7 +36,13 @@ import org.junit.Test;

import java.io.IOException;
import java.util.Arrays;

-import static io.druid.query.QueryRunnerTestHelper.*;
+import static io.druid.query.QueryRunnerTestHelper.addRowsIndexConstant;
+import static io.druid.query.QueryRunnerTestHelper.allGran;
+import static io.druid.query.QueryRunnerTestHelper.commonAggregators;
+import static io.druid.query.QueryRunnerTestHelper.dataSource;
+import static io.druid.query.QueryRunnerTestHelper.fullOnInterval;
+import static io.druid.query.QueryRunnerTestHelper.indexMetric;
+import static io.druid.query.QueryRunnerTestHelper.providerDimension;

public class TopNQueryTest
{
@@ -25,12 +25,16 @@ import com.google.common.collect.Ordering;
import com.google.inject.Inject;
import com.metamx.common.logger.Logger;
import com.metamx.http.client.HttpClient;
-import io.druid.client.selector.ServerSelector;
import io.druid.client.selector.QueryableDruidServer;
+import io.druid.client.selector.ServerSelector;
import io.druid.client.selector.ServerSelectorStrategy;
import io.druid.concurrent.Execs;
import io.druid.guice.annotations.Client;
-import io.druid.query.*;
+import io.druid.query.DataSource;
+import io.druid.query.QueryDataSource;
+import io.druid.query.QueryRunner;
+import io.druid.query.QueryToolChestWarehouse;
+import io.druid.query.TableDataSource;
import io.druid.timeline.DataSegment;
import io.druid.timeline.VersionedIntervalTimeline;
import io.druid.timeline.partition.PartitionChunk;

@@ -235,13 +239,12 @@ public class BrokerServerView implements TimelineServerView
  {
    String table;
    while (dataSource instanceof QueryDataSource) {
-      dataSource = ((QueryDataSource)dataSource).getQuery().getDataSource();
+      dataSource = ((QueryDataSource) dataSource).getQuery().getDataSource();
    }

    if (dataSource instanceof TableDataSource) {
-      table = ((TableDataSource)dataSource).getName();
-    }
-    else {
+      table = ((TableDataSource) dataSource).getName();
+    } else {
      throw new UnsupportedOperationException("Unsupported data source type: " + dataSource.getClass().getSimpleName());
    }
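BrokerServerView resolves the base table by peeling QueryDataSource wrappers in a loop until it reaches a TableDataSource. A condensed sketch of that resolution (stand-in types; the real QueryDataSource wraps a Query whose own data source is consulted, which is simplified away here):

public class BaseTableResolver
{
  // Stand-ins for Druid's DataSource hierarchy, for illustration only.
  interface DataSource {}

  static class TableDataSource implements DataSource
  {
    final String name;
    TableDataSource(String name) { this.name = name; }
  }

  static class QueryDataSource implements DataSource
  {
    final DataSource inner; // simplification: wraps the inner data source directly
    QueryDataSource(DataSource inner) { this.inner = inner; }
  }

  // Peel off query wrappers until the underlying table is reached,
  // mirroring the while-loop in BrokerServerView.
  static String resolveTable(DataSource dataSource)
  {
    while (dataSource instanceof QueryDataSource) {
      dataSource = ((QueryDataSource) dataSource).inner;
    }
    if (dataSource instanceof TableDataSource) {
      return ((TableDataSource) dataSource).name;
    }
    throw new UnsupportedOperationException(
        "Unsupported data source type: " + dataSource.getClass().getSimpleName()
    );
  }

  public static void main(String[] args)
  {
    DataSource nested = new QueryDataSource(new QueryDataSource(new TableDataSource("events")));
    System.out.println(resolveTable(nested)); // events
  }
}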
@@ -40,8 +40,8 @@ import com.metamx.common.guava.Sequence;
import com.metamx.common.guava.Sequences;
import com.metamx.emitter.EmittingLogger;
import io.druid.client.cache.Cache;
-import io.druid.client.selector.ServerSelector;
import io.druid.client.selector.QueryableDruidServer;
+import io.druid.client.selector.ServerSelector;
import io.druid.guice.annotations.Smile;
import io.druid.query.BySegmentResultValueClass;
import io.druid.query.CacheStrategy;

@@ -124,7 +124,7 @@ public class CachingClusteredClient<T> implements QueryRunner<T> (indentation change only)

    final boolean useCache = Boolean.parseBoolean(query.getContextValue("useCache", "true")) && strategy != null;
    final boolean populateCache = Boolean.parseBoolean(query.getContextValue("populateCache", "true"))
                                  && strategy != null;
    final boolean isBySegment = Boolean.parseBoolean(query.getContextValue("bySegment", "false"));

@@ -286,8 +286,7 @@ public class CachingClusteredClient<T> implements QueryRunner<T>
              objectMapper.getFactory().createParser(cachedResult),
              cacheObjectClazz
          );
-        }
-        catch (IOException e) {
+        } catch (IOException e) {
          throw Throwables.propagate(e);
        }
      }

@@ -340,7 +339,7 @@ public class CachingClusteredClient<T> implements QueryRunner<T>
        CachePopulator cachePopulator = cachePopulatorMap.get(
            String.format("%s_%s", segmentIdentifier, value.getInterval())
        );
-        if(cachePopulator != null) {
+        if (cachePopulator != null) {
          cachePopulator.populate(Iterables.transform(segmentResults, prepareForCache));
        }

@@ -425,8 +424,7 @@ public class CachingClusteredClient<T> implements QueryRunner<T>
        }

        cache.put(key, valueBytes);
-      }
-      catch (IOException e) {
+      } catch (IOException e) {
        throw Throwables.propagate(e);
      }
    }
@@ -30,11 +30,19 @@ import com.metamx.common.lifecycle.LifecycleStop;
import com.metamx.emitter.EmittingLogger;
import io.druid.data.input.Firehose;
import io.druid.data.input.InputRow;
-import io.druid.query.*;
-import io.druid.segment.ReferenceCountingSegment;
+import io.druid.query.DataSource;
+import io.druid.query.FinalizeResultsQueryRunner;
+import io.druid.query.NoopQueryRunner;
+import io.druid.query.Query;
+import io.druid.query.QueryRunner;
+import io.druid.query.QueryRunnerFactory;
+import io.druid.query.QueryRunnerFactoryConglomerate;
+import io.druid.query.QuerySegmentWalker;
+import io.druid.query.QueryToolChest;
+import io.druid.query.SegmentDescriptor;
+import io.druid.query.TableDataSource;
import io.druid.segment.realtime.plumber.Plumber;
import io.druid.segment.realtime.plumber.Sink;
import io.druid.timeline.VersionedIntervalTimeline;
import org.joda.time.DateTime;
import org.joda.time.Interval;
import org.joda.time.Period;

@@ -90,6 +98,7 @@ public class RealtimeManager implements QuerySegmentWalker
      Closeables.closeQuietly(chief);
    }
  }
+
  public FireDepartmentMetrics getMetrics(String datasource)
  {
    FireChief chief = chiefs.get(datasource);

@@ -124,9 +133,8 @@ public class RealtimeManager implements QuerySegmentWalker

    String dataSourceName;
    try {
-      dataSourceName = ((TableDataSource)query.getDataSource()).getName();
-    }
-    catch (ClassCastException e) {
+      dataSourceName = ((TableDataSource) query.getDataSource()).getName();
+    } catch (ClassCastException e) {
      throw new UnsupportedOperationException("Subqueries are only supported in the broker");
    }
    return dataSourceName;

@@ -164,8 +172,7 @@ public class RealtimeManager implements QuerySegmentWalker
        log.info("Someone get us a plumber!");
        plumber = fireDepartment.findPlumber();
        log.info("We have our plumber!");
-      }
-      catch (IOException e) {
+      } catch (IOException e) {
        throw Throwables.propagate(e);
      }
    }

@@ -192,8 +199,7 @@ public class RealtimeManager implements QuerySegmentWalker
        try {
          try {
            inputRow = firehose.nextRow();
-          }
-          catch (Exception e) {
+          } catch (Exception e) {
            log.debug(e, "thrown away line due to exception, considering unparseable");
            metrics.incrementUnparseable();
            continue;

@@ -218,8 +224,7 @@ public class RealtimeManager implements QuerySegmentWalker
            nextFlush = new DateTime().plus(intermediatePersistPeriod).getMillis();
          }
          metrics.incrementProcessed();
-        }
-        catch (FormattedException e) {
+        } catch (FormattedException e) {
          log.info(e, "unparseable line: %s", e.getDetails());
          metrics.incrementUnparseable();
          continue;

@@ -227,16 +232,15 @@ public class RealtimeManager implements QuerySegmentWalker
        }
      } catch (RuntimeException e) {
        log.makeAlert(e, "RuntimeException aborted realtime processing[%s]", fireDepartment.getSchema().getDataSource())
           .emit();
        normalExit = false;
        throw e;
      } catch (Error e) {
        log.makeAlert(e, "Exception aborted realtime processing[%s]", fireDepartment.getSchema().getDataSource())
           .emit();
        normalExit = false;
        throw e;
-      }
-      finally {
+      } finally {
        Closeables.closeQuietly(firehose);
        if (normalExit) {
          plumber.finishJob();
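RealtimeManager and ServerManager both guard the table cast with the same idiom: attempt the downcast and translate a ClassCastException into a clearer UnsupportedOperationException. A generic sketch of the idiom (a String stands in for TableDataSource here):

public class CastGuardSketch
{
  // Cast and translate the failure into a user-facing error, as the
  // realtime and historical nodes do for non-table data sources.
  static String nameOf(Object dataSource)
  {
    try {
      return (String) dataSource; // stand-in for ((TableDataSource) ds).getName()
    } catch (ClassCastException e) {
      throw new UnsupportedOperationException("Subqueries are only supported in the broker");
    }
  }

  public static void main(String[] args)
  {
    System.out.println(nameOf("wikipedia"));
    try {
      nameOf(42); // not a table: the cast fails and is translated
    } catch (UnsupportedOperationException e) {
      System.out.println(e.getMessage());
    }
  }
}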
@@ -30,7 +30,20 @@ import com.metamx.emitter.service.ServiceEmitter;
import com.metamx.emitter.service.ServiceMetricEvent;
import io.druid.collections.CountingMap;
import io.druid.guice.annotations.Processing;
-import io.druid.query.*;
+import io.druid.query.BySegmentQueryRunner;
+import io.druid.query.DataSource;
+import io.druid.query.FinalizeResultsQueryRunner;
+import io.druid.query.MetricsEmittingQueryRunner;
+import io.druid.query.NoopQueryRunner;
+import io.druid.query.Query;
+import io.druid.query.QueryRunner;
+import io.druid.query.QueryRunnerFactory;
+import io.druid.query.QueryRunnerFactoryConglomerate;
+import io.druid.query.QuerySegmentWalker;
+import io.druid.query.QueryToolChest;
+import io.druid.query.ReferenceCountingSegmentQueryRunner;
+import io.druid.query.SegmentDescriptor;
+import io.druid.query.TableDataSource;
import io.druid.query.spec.QuerySegmentSpec;
import io.druid.query.spec.SpecificSegmentQueryRunner;
import io.druid.query.spec.SpecificSegmentSpec;

@@ -227,9 +240,8 @@ public class ServerManager implements QuerySegmentWalker

    String dataSourceName;
    try {
-      dataSourceName = ((TableDataSource)query.getDataSource()).getName();
-    }
-    catch (ClassCastException e) {
+      dataSourceName = ((TableDataSource) query.getDataSource()).getName();
+    } catch (ClassCastException e) {
      throw new UnsupportedOperationException("Subqueries are only supported in the broker");
    }

@@ -313,9 +325,8 @@ public class ServerManager implements QuerySegmentWalker

    String dataSourceName;
    try {
-      dataSourceName = ((TableDataSource)query.getDataSource()).getName();
-    }
-    catch (ClassCastException e) {
+      dataSourceName = ((TableDataSource) query.getDataSource()).getName();
+    } catch (ClassCastException e) {
      throw new UnsupportedOperationException("Subqueries are only supported in the broker");
    }
@@ -23,7 +23,12 @@ import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import com.fasterxml.jackson.dataformat.smile.SmileFactory;
import com.google.common.base.Function;
-import com.google.common.collect.*;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.Iterables;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+import com.google.common.collect.Ordering;
import com.metamx.common.ISE;
import com.metamx.common.guava.FunctionalIterable;
import com.metamx.common.guava.MergeIterable;

@@ -38,7 +43,17 @@ import io.druid.client.selector.ServerSelector;
import io.druid.granularity.PeriodGranularity;
import io.druid.granularity.QueryGranularity;
import io.druid.jackson.DefaultObjectMapper;
-import io.druid.query.*;
+import io.druid.query.BySegmentResultValueClass;
+import io.druid.query.DataSource;
+import io.druid.query.Druids;
+import io.druid.query.MapQueryToolChestWarehouse;
+import io.druid.query.Query;
+import io.druid.query.QueryConfig;
+import io.druid.query.QueryRunner;
+import io.druid.query.QueryToolChest;
+import io.druid.query.Result;
+import io.druid.query.SegmentDescriptor;
+import io.druid.query.TableDataSource;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.CountAggregatorFactory;
import io.druid.query.aggregation.LongSumAggregatorFactory;

@@ -58,11 +73,19 @@ import io.druid.query.timeboundary.TimeBoundaryResultValue;
import io.druid.query.timeseries.TimeseriesQuery;
import io.druid.query.timeseries.TimeseriesQueryQueryToolChest;
import io.druid.query.timeseries.TimeseriesResultValue;
-import io.druid.query.topn.*;
+import io.druid.query.topn.TopNQuery;
+import io.druid.query.topn.TopNQueryBuilder;
+import io.druid.query.topn.TopNQueryConfig;
+import io.druid.query.topn.TopNQueryQueryToolChest;
+import io.druid.query.topn.TopNResultValue;
import io.druid.segment.TestHelper;
import io.druid.timeline.DataSegment;
import io.druid.timeline.VersionedIntervalTimeline;
-import io.druid.timeline.partition.*;
+import io.druid.timeline.partition.NoneShardSpec;
+import io.druid.timeline.partition.PartitionChunk;
+import io.druid.timeline.partition.ShardSpec;
+import io.druid.timeline.partition.SingleElementPartitionChunk;
+import io.druid.timeline.partition.StringPartitionChunk;
import org.easymock.Capture;
import org.easymock.EasyMock;
import org.joda.time.DateTime;

@@ -77,7 +100,13 @@ import org.junit.runners.Parameterized;

import javax.annotation.Nullable;
import java.io.IOException;
-import java.util.*;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+import java.util.TreeMap;
import java.util.concurrent.Executor;

/**

@@ -184,13 +213,13 @@ public class CachingClusteredClientTest (indentation change only)
  public void testTimeseriesCaching() throws Exception
  {
    final Druids.TimeseriesQueryBuilder builder = Druids.newTimeseriesQueryBuilder()
                                                        .dataSource(DATA_SOURCE)
                                                        .intervals(SEG_SPEC)
                                                        .filters(DIM_FILTER)
                                                        .granularity(GRANULARITY)
                                                        .aggregators(AGGS)
                                                        .postAggregators(POST_AGGS)
                                                        .context(CONTEXT);

    testQueryCaching(
        builder.build(),

@@ -235,9 +264,9 @@ public class CachingClusteredClientTest (indentation change only)
        ),
        client.run(
            builder.intervals("2011-01-01/2011-01-10")
                   .aggregators(RENAMED_AGGS)
                   .postAggregators(RENAMED_POST_AGGS)
                   .build()
        )
    );
  }

@@ -247,13 +276,13 @@ public class CachingClusteredClientTest (indentation change only)
  public void testTimeseriesCachingTimeZone() throws Exception
  {
    final Druids.TimeseriesQueryBuilder builder = Druids.newTimeseriesQueryBuilder()
                                                        .dataSource(DATA_SOURCE)
                                                        .intervals(SEG_SPEC)
                                                        .filters(DIM_FILTER)
                                                        .granularity(PT1H_TZ_GRANULARITY)
                                                        .aggregators(AGGS)
                                                        .postAggregators(POST_AGGS)
                                                        .context(CONTEXT);

    testQueryCaching(
        builder.build(),

@@ -275,9 +304,9 @@ public class CachingClusteredClientTest (indentation change only)
        ),
        client.run(
            builder.intervals("2011-11-04/2011-11-08")
                   .aggregators(RENAMED_AGGS)
                   .postAggregators(RENAMED_POST_AGGS)
                   .build()
        )
    );
  }

@@ -286,18 +315,18 @@ public class CachingClusteredClientTest (indentation change only)
  public void testDisableUseCache() throws Exception
  {
    final Druids.TimeseriesQueryBuilder builder = Druids.newTimeseriesQueryBuilder()
                                                        .dataSource(DATA_SOURCE)
                                                        .intervals(SEG_SPEC)
                                                        .filters(DIM_FILTER)
                                                        .granularity(GRANULARITY)
                                                        .aggregators(AGGS)
                                                        .postAggregators(POST_AGGS);

    testQueryCaching(
        1,
        true,
        builder.context(ImmutableMap.of("useCache", "false",
                                        "populateCache", "true")).build(),
        new Interval("2011-01-01/2011-01-02"), makeTimeResults(new DateTime("2011-01-01"), 50, 5000)
    );

@@ -311,7 +340,7 @@ public class CachingClusteredClientTest (indentation change only)
        1,
        false,
        builder.context(ImmutableMap.of("useCache", "false",
                                        "populateCache", "false")).build(),
        new Interval("2011-01-01/2011-01-02"), makeTimeResults(new DateTime("2011-01-01"), 50, 5000)
    );

@@ -323,7 +352,7 @@ public class CachingClusteredClientTest (indentation change only)
        1,
        false,
        builder.context(ImmutableMap.of("useCache", "true",
                                        "populateCache", "false")).build(),
        new Interval("2011-01-01/2011-01-02"), makeTimeResults(new DateTime("2011-01-01"), 50, 5000)
    );

@@ -392,10 +421,10 @@ public class CachingClusteredClientTest (indentation change only)
        ),
        client.run(
            builder.intervals("2011-01-01/2011-01-10")
                   .metric("imps")
                   .aggregators(RENAMED_AGGS)
                   .postAggregators(RENAMED_POST_AGGS)
                   .build()
        )
    );
  }

@@ -437,10 +466,10 @@ public class CachingClusteredClientTest (indentation change only)
        ),
        client.run(
            builder.intervals("2011-11-04/2011-11-08")
                   .metric("imps")
                   .aggregators(RENAMED_AGGS)
                   .postAggregators(RENAMED_POST_AGGS)
                   .build()
        )
    );
  }

@@ -503,10 +532,10 @@ public class CachingClusteredClientTest (indentation change only)
        ),
        client.run(
            builder.intervals("2011-01-01/2011-01-10")
                   .metric("imps")
                   .aggregators(RENAMED_AGGS)
                   .postAggregators(RENAMED_POST_AGGS)
                   .build()
        )
    );
  }

@@ -552,7 +581,8 @@ public class CachingClusteredClientTest
    );
  }

-  public void testQueryCaching(final Query query, Object... args) {
+  public void testQueryCaching(final Query query, Object... args)
+  {
    testQueryCaching(3, true, query, args);
  }

@@ -607,8 +637,8 @@ public class CachingClusteredClientTest (indentation change only)

    EasyMock.expect(serverView.getQueryRunner(server))
            .andReturn(expectations.getQueryRunner())
            .once();

    final Capture<? extends Query> capture = new Capture();
    queryCaptures.add(capture);

@@ -625,8 +655,8 @@ public class CachingClusteredClientTest (indentation change only)
    }

    EasyMock.expect(queryable.run(EasyMock.capture(capture)))
            .andReturn(toQueryableTimeseriesResults(expectBySegment, segmentIds, intervals, results))
            .once();

  } else if (query instanceof TopNQuery) {
    List<String> segmentIds = Lists.newArrayList();

@@ -638,8 +668,8 @@ public class CachingClusteredClientTest (indentation change only)
      results.add(expectation.getResults());
    }
    EasyMock.expect(queryable.run(EasyMock.capture(capture)))
            .andReturn(toQueryableTopNResults(segmentIds, intervals, results))
            .once();
  } else if (query instanceof SearchQuery) {
    List<String> segmentIds = Lists.newArrayList();
    List<Interval> intervals = Lists.newArrayList();

@@ -650,8 +680,8 @@ public class CachingClusteredClientTest (indentation change only)
      results.add(expectation.getResults());
    }
    EasyMock.expect(queryable.run(EasyMock.capture(capture)))
            .andReturn(toQueryableSearchResults(segmentIds, intervals, results))
            .once();
  } else if (query instanceof TimeBoundaryQuery) {
    List<String> segmentIds = Lists.newArrayList();
    List<Interval> intervals = Lists.newArrayList();

@@ -662,8 +692,8 @@ public class CachingClusteredClientTest (indentation change only)
      results.add(expectation.getResults());
    }
    EasyMock.expect(queryable.run(EasyMock.capture(capture)))
            .andReturn(toQueryableTimeBoundaryResults(segmentIds, intervals, results))
            .once();
  } else {
    throw new ISE("Unknown query type[%s]", query.getClass());
  }

@@ -730,13 +760,12 @@ public class CachingClusteredClientTest
    // make sure all the queries were sent down as 'bySegment'
    for (Capture queryCapture : queryCaptures) {
      Query capturedQuery = (Query) queryCapture.getValue();
-      if(expectBySegment) {
+      if (expectBySegment) {
        Assert.assertEquals("true", capturedQuery.getContextValue("bySegment"));
-      }
-      else {
+      } else {
        Assert.assertTrue(
            capturedQuery.getContextValue("bySegment") == null ||
            capturedQuery.getContextValue("bySegment").equals("false")
        );
      }
    }

@@ -791,7 +820,8 @@ public class CachingClusteredClientTest
        }
        timeline.add(queryIntervals.get(k), String.valueOf(k), chunk);
      }
-    } return serverExpectationList;
+    }
+    return serverExpectationList;
  }

  private Sequence<Result<TimeseriesResultValue>> toQueryableTimeseriesResults(

@@ -801,35 +831,35 @@ public class CachingClusteredClientTest (body re-indented)
      Iterable<Iterable<Result<TimeseriesResultValue>>> results
  )
  {
-    if(bySegment) {
+    if (bySegment) {
      return Sequences.simple(
          FunctionalIterable
              .create(segmentIds)
              .trinaryTransform(
                  intervals,
                  results,
                  new TrinaryFn<String, Interval, Iterable<Result<TimeseriesResultValue>>, Result<TimeseriesResultValue>>()
                  {
                    @Override
                    @SuppressWarnings("unchecked")
                    public Result<TimeseriesResultValue> apply(
                        final String segmentId,
                        final Interval interval,
                        final Iterable<Result<TimeseriesResultValue>> results
                    )
                    {
                      return new Result(
                          results.iterator().next().getTimestamp(),
                          new BySegmentResultValueClass(
                              Lists.newArrayList(results),
                              segmentId,
                              interval
                          )
                      );
                    }
                  }
              )
      );
    } else {
      return Sequences.simple(Iterables.concat(results));
    }

@@ -967,36 +997,36 @@ public class CachingClusteredClientTest (body re-indented)
  }

  private Iterable<BySegmentResultValueClass<TimeseriesResultValue>> makeBySegmentTimeResults
      (Object... objects)
  {
    if (objects.length % 5 != 0) {
      throw new ISE("makeTimeResults must be passed arguments in groups of 5, got[%d]", objects.length);
    }

    List<BySegmentResultValueClass<TimeseriesResultValue>> retVal = Lists.newArrayListWithCapacity(objects.length / 5);
    for (int i = 0; i < objects.length; i += 5) {
      retVal.add(
          new BySegmentResultValueClass<TimeseriesResultValue>(
              Lists.newArrayList(
                  new TimeseriesResultValue(
                      ImmutableMap.of(
                          "rows", objects[i + 1],
                          "imps", objects[i + 2],
                          "impers", objects[i + 2],
                          "avg_imps_per_row",
                          ((Number) objects[i + 2]).doubleValue() / ((Number) objects[i + 1]).doubleValue()
                      )
                  )
              ),
-              (String)objects[i+3],
-              (Interval)objects[i+4]
+              (String) objects[i + 3],
+              (Interval) objects[i + 4]
          )
      );
    }
    return retVal;
  }

  private Iterable<Result<TimeseriesResultValue>> makeRenamedTimeResults
      (Object... objects)
  {

@@ -1129,13 +1159,13 @@ public class CachingClusteredClientTest (indentation change only)
    return new CachingClusteredClient(
        new MapQueryToolChestWarehouse(
            ImmutableMap.<Class<? extends Query>, QueryToolChest>builder()
                        .put(
                            TimeseriesQuery.class,
                            new TimeseriesQueryQueryToolChest(new QueryConfig())
                        )
                        .put(TopNQuery.class, new TopNQueryQueryToolChest(new TopNQueryConfig()))
                        .put(SearchQuery.class, new SearchQueryQueryToolChest(new SearchQueryConfig()))
                        .build()
        ),
        new TimelineServerView()
        {