Support renaming of outputName for cached select and search query results (#3395)

* support renaming of outputName for cached select and search queries

* rebase and resolve conflicts

* rollback CacheStrategy interface change

* updated based on review comments
This commit is contained in:
Keuntae Park 2016-09-21 00:19:14 +09:00 committed by Gian Merlino
parent 3175e17a3b
commit 54ec4dd584
5 changed files with 386 additions and 65 deletions

View File

@ -167,7 +167,7 @@ public class SegmentMetadataQueryQueryToolChest extends QueryToolChest<SegmentAn
}
@Override
public CacheStrategy<SegmentAnalysis, SegmentAnalysis, SegmentMetadataQuery> getCacheStrategy(SegmentMetadataQuery query)
public CacheStrategy<SegmentAnalysis, SegmentAnalysis, SegmentMetadataQuery> getCacheStrategy(final SegmentMetadataQuery query)
{
return new CacheStrategy<SegmentAnalysis, SegmentAnalysis, SegmentMetadataQuery>()
{

View File

@ -22,9 +22,10 @@ package io.druid.query.search;
import com.fasterxml.jackson.core.type.TypeReference;
import com.google.common.base.Function;
import com.google.common.base.Functions;
import com.google.common.collect.ImmutableList;
import com.google.common.base.Preconditions;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Ordering;
import com.google.common.primitives.Ints;
import com.google.inject.Inject;
@ -54,7 +55,7 @@ import org.joda.time.DateTime;
import javax.annotation.Nullable;
import java.nio.ByteBuffer;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
@ -132,10 +133,26 @@ public class SearchQueryQueryToolChest extends QueryToolChest<Result<SearchResul
}
@Override
public CacheStrategy<Result<SearchResultValue>, Object, SearchQuery> getCacheStrategy(SearchQuery query)
public CacheStrategy<Result<SearchResultValue>, Object, SearchQuery> getCacheStrategy(final SearchQuery query)
{
return new CacheStrategy<Result<SearchResultValue>, Object, SearchQuery>()
{
private final List<DimensionSpec> dimensionSpecs =
query.getDimensions() != null ? query.getDimensions() : Collections.<DimensionSpec>emptyList();
private final List<String> dimOutputNames = dimensionSpecs.size() > 0 ?
Lists.transform(
dimensionSpecs,
new Function<DimensionSpec, String>() {
@Override
public String apply(DimensionSpec input) {
return input.getOutputName();
}
}
)
:
Collections.<String>emptyList();
@Override
public byte[] computeCacheKey(SearchQuery query)
{
@ -144,15 +161,13 @@ public class SearchQueryQueryToolChest extends QueryToolChest<Result<SearchResul
final byte[] querySpecBytes = query.getQuery().getCacheKey();
final byte[] granularityBytes = query.getGranularity().cacheKey();
final Collection<DimensionSpec> dimensions = query.getDimensions() == null
? ImmutableList.<DimensionSpec>of()
: query.getDimensions();
final byte[][] dimensionsBytes = new byte[dimensions.size()][];
final List<DimensionSpec> dimensionSpecs =
query.getDimensions() != null ? query.getDimensions() : Collections.<DimensionSpec>emptyList();
final byte[][] dimensionsBytes = new byte[dimensionSpecs.size()][];
int dimensionsBytesSize = 0;
int index = 0;
for (DimensionSpec dimension : dimensions) {
dimensionsBytes[index] = dimension.getCacheKey();
for (DimensionSpec dimensionSpec : dimensionSpecs) {
dimensionsBytes[index] = dimensionSpec.getCacheKey();
dimensionsBytesSize += dimensionsBytes[index].length;
++index;
}
@ -193,7 +208,9 @@ public class SearchQueryQueryToolChest extends QueryToolChest<Result<SearchResul
@Override
public Object apply(Result<SearchResultValue> input)
{
return Lists.newArrayList(input.getTimestamp().getMillis(), input.getValue());
return dimensionSpecs.size() > 0
? Lists.newArrayList(input.getTimestamp().getMillis(), input.getValue(), dimOutputNames)
: Lists.newArrayList(input.getTimestamp().getMillis(), input.getValue());
}
};
}
@ -208,36 +225,98 @@ public class SearchQueryQueryToolChest extends QueryToolChest<Result<SearchResul
public Result<SearchResultValue> apply(Object input)
{
List<Object> result = (List<Object>) input;
boolean needsRename = false;
final Map<String, String> outputNameMap = Maps.newHashMap();
if (hasOutputName(result)) {
List<String> cachedOutputNames = (List) result.get(2);
Preconditions.checkArgument(cachedOutputNames.size() == dimOutputNames.size(),
"cache hit, but number of dimensions mismatch");
needsRename = false;
for (int idx = 0; idx < cachedOutputNames.size(); idx++) {
String cachedOutputName = cachedOutputNames.get(idx);
String outputName = dimOutputNames.get(idx);
if (!cachedOutputName.equals(outputName)) {
needsRename = true;
}
outputNameMap.put(cachedOutputName, outputName);
}
}
return new Result<>(
new DateTime(((Number) result.get(0)).longValue()),
new SearchResultValue(
Lists.transform(
(List) result.get(1),
new Function<Object, SearchHit>()
{
@Override
public SearchHit apply(@Nullable Object input)
{
if (input instanceof Map) {
return new SearchHit(
return !needsRename
? new Result<>(
new DateTime(((Number) result.get(0)).longValue()),
new SearchResultValue(
Lists.transform(
(List) result.get(1),
new Function<Object, SearchHit>()
{
@Override
public SearchHit apply(@Nullable Object input)
{
if (input instanceof Map) {
return new SearchHit(
(String) ((Map) input).get("dimension"),
(String) ((Map) input).get("value"),
(Integer) ((Map) input).get("count")
);
} else if (input instanceof SearchHit) {
return (SearchHit) input;
} else {
throw new IAE("Unknown format [%s]", input.getClass());
);
} else if (input instanceof SearchHit) {
return (SearchHit) input;
} else {
throw new IAE("Unknown format [%s]", input.getClass());
}
}
}
}
}
)
)
)
);
: new Result<>(
new DateTime(((Number) result.get(0)).longValue()),
new SearchResultValue(
Lists.transform(
(List) result.get(1),
new Function<Object, SearchHit>()
{
@Override
public SearchHit apply(@Nullable Object input)
{
String dim = null;
String val = null;
Integer cnt = null;
if (input instanceof Map) {
dim = outputNameMap.get((String)((Map) input).get("dimension"));
val = (String) ((Map) input).get("value");
cnt = (Integer) ((Map) input).get("count");
} else if (input instanceof SearchHit) {
SearchHit cached = (SearchHit)input;
dim = outputNameMap.get(cached.getDimension());
val = cached.getValue();
cnt = cached.getCount();
} else {
throw new IAE("Unknown format [%s]", input.getClass());
}
return new SearchHit(dim, val, cnt);
}
}
)
)
)
;
}
};
}
private boolean hasOutputName(List<Object> cachedEntry)
{
/*
 * A cached entry is a list of either two or three objects:
 *   1. timestamp (millis)
 *   2. SearchResultValue
 *   3. the outputName of each dimension (optional; only written when the query had dimension specs)
 *
 * If a cached entry has three objects, the dimension names inside the SearchResultValue
 * must be checked against the current query's outputNames to decide whether a rename is needed.
 */
return cachedEntry.size() == 3;
}
};
}

View File

@ -23,13 +23,13 @@ import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Function;
import com.google.common.base.Functions;
import com.google.common.base.Preconditions;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Ordering;
import com.google.common.collect.Sets;
import com.google.inject.Inject;
import com.metamx.common.ISE;
import com.metamx.common.StringUtils;
import com.metamx.common.guava.Comparators;
import com.metamx.common.guava.Sequence;
@ -141,8 +141,24 @@ public class SelectQueryQueryToolChest extends QueryToolChest<Result<SelectResul
@Override
public CacheStrategy<Result<SelectResultValue>, Object, SelectQuery> getCacheStrategy(final SelectQuery query)
{
return new CacheStrategy<Result<SelectResultValue>, Object, SelectQuery>()
{
private final List<DimensionSpec> dimensionSpecs =
query.getDimensions() != null ? query.getDimensions() : Collections.<DimensionSpec>emptyList();
private final List<String> dimOutputNames = dimensionSpecs.size() > 0 ?
Lists.transform(
dimensionSpecs,
new Function<DimensionSpec, String>() {
@Override
public String apply(DimensionSpec input) {
return input.getOutputName();
}
}
)
:
Collections.<String>emptyList();
@Override
public byte[] computeCacheKey(SelectQuery query)
{
@ -150,11 +166,8 @@ public class SelectQueryQueryToolChest extends QueryToolChest<Result<SelectResul
final byte[] filterBytes = dimFilter == null ? new byte[]{} : dimFilter.getCacheKey();
final byte[] granularityBytes = query.getGranularity().cacheKey();
List<DimensionSpec> dimensionSpecs = query.getDimensions();
if (dimensionSpecs == null) {
dimensionSpecs = Collections.emptyList();
}
final List<DimensionSpec> dimensionSpecs =
query.getDimensions() != null ? query.getDimensions() : Collections.<DimensionSpec>emptyList();
final byte[][] dimensionsBytes = new byte[dimensionSpecs.size()][];
int dimensionsBytesSize = 0;
int index = 0;
@ -217,6 +230,16 @@ public class SelectQueryQueryToolChest extends QueryToolChest<Result<SelectResul
@Override
public Object apply(final Result<SelectResultValue> input)
{
if (!dimOutputNames.isEmpty()) {
return Arrays.asList(
input.getTimestamp().getMillis(),
input.getValue().getPagingIdentifiers(),
input.getValue().getDimensions(),
input.getValue().getMetrics(),
input.getValue().getEvents(),
dimOutputNames
);
}
return Arrays.asList(
input.getTimestamp().getMillis(),
input.getValue().getPagingIdentifiers(),
@ -243,29 +266,43 @@ public class SelectQueryQueryToolChest extends QueryToolChest<Result<SelectResul
DateTime timestamp = granularity.toDateTime(((Number) resultIter.next()).longValue());
return new Result<SelectResultValue>(
Map<String, Integer> pageIdentifier = jsonMapper.convertValue(
resultIter.next(), new TypeReference<Map<String, Integer>>() {}
);
Set<String> dimensionSet = jsonMapper.convertValue(
resultIter.next(), new TypeReference<Set<String>>() {}
);
Set<String> metricSet = jsonMapper.convertValue(
resultIter.next(), new TypeReference<Set<String>>() {}
);
List<EventHolder> eventHolders = jsonMapper.convertValue(
resultIter.next(), new TypeReference<List<EventHolder>>() {}
);
// check the condition that outputName of cached result should be updated
if (resultIter.hasNext()) {
List<String> cachedOutputNames = (List<String>) resultIter.next();
Preconditions.checkArgument(cachedOutputNames.size() == dimOutputNames.size(),
"Cache hit but different number of dimensions??");
for (int idx = 0; idx < dimOutputNames.size(); idx++) {
if (!cachedOutputNames.get(idx).equals(dimOutputNames.get(idx))) {
// rename outputName in the EventHolder
for (EventHolder eventHolder: eventHolders) {
Object obj = eventHolder.getEvent().remove(cachedOutputNames.get(idx));
if (obj != null) {
eventHolder.getEvent().put(dimOutputNames.get(idx), obj);
}
}
}
}
}
return new Result<>(
timestamp,
new SelectResultValue(
(Map<String, Integer>) jsonMapper.convertValue(
resultIter.next(), new TypeReference<Map<String, Integer>>()
{
}
),
(Set<String>) jsonMapper.convertValue(
resultIter.next(), new TypeReference<Set<String>>()
{
}
),
(Set<String>) jsonMapper.convertValue(
resultIter.next(), new TypeReference<Set<String>>()
{
}
),
(List<EventHolder>) jsonMapper.convertValue(
resultIter.next(), new TypeReference<List<EventHolder>>()
{
}
)
pageIdentifier,
dimensionSet,
metricSet,
eventHolders
)
);
}

View File

@ -129,7 +129,7 @@ public class TimeBoundaryQueryQueryToolChest
}
@Override
public CacheStrategy<Result<TimeBoundaryResultValue>, Object, TimeBoundaryQuery> getCacheStrategy(TimeBoundaryQuery query)
public CacheStrategy<Result<TimeBoundaryResultValue>, Object, TimeBoundaryQuery> getCacheStrategy(final TimeBoundaryQuery query)
{
return new CacheStrategy<Result<TimeBoundaryResultValue>, Object, TimeBoundaryQuery>()
{

View File

@ -1092,7 +1092,7 @@ public class CachingClusteredClientTest
.granularity(GRANULARITY)
.limit(1000)
.intervals(SEG_SPEC)
.dimensions(Arrays.asList("a_dim"))
.dimensions(Arrays.asList(TOP_DIM))
.query("how")
.context(CONTEXT);
@ -1100,13 +1100,14 @@ public class CachingClusteredClientTest
client,
builder.build(),
new Interval("2011-01-01/2011-01-02"),
makeSearchResults(new DateTime("2011-01-01"), "how", 1, "howdy", 2, "howwwwww", 3, "howwy", 4),
makeSearchResults(TOP_DIM, new DateTime("2011-01-01"), "how", 1, "howdy", 2, "howwwwww", 3, "howwy", 4),
new Interval("2011-01-02/2011-01-03"),
makeSearchResults(new DateTime("2011-01-02"), "how1", 1, "howdy1", 2, "howwwwww1", 3, "howwy1", 4),
makeSearchResults(TOP_DIM, new DateTime("2011-01-02"), "how1", 1, "howdy1", 2, "howwwwww1", 3, "howwy1", 4),
new Interval("2011-01-05/2011-01-10"),
makeSearchResults(
TOP_DIM,
new DateTime("2011-01-05"), "how2", 1, "howdy2", 2, "howwwwww2", 3, "howww2", 4,
new DateTime("2011-01-06"), "how3", 1, "howdy3", 2, "howwwwww3", 3, "howww3", 4,
new DateTime("2011-01-07"), "how4", 1, "howdy4", 2, "howwwwww4", 3, "howww4", 4,
@ -1116,6 +1117,7 @@ public class CachingClusteredClientTest
new Interval("2011-01-05/2011-01-10"),
makeSearchResults(
TOP_DIM,
new DateTime("2011-01-05T01"), "how2", 1, "howdy2", 2, "howwwwww2", 3, "howww2", 4,
new DateTime("2011-01-06T01"), "how3", 1, "howdy3", 2, "howwwwww3", 3, "howww3", 4,
new DateTime("2011-01-07T01"), "how4", 1, "howdy4", 2, "howwwwww4", 3, "howww4", 4,
@ -1133,6 +1135,7 @@ public class CachingClusteredClientTest
HashMap<String, Object> context = new HashMap<String, Object>();
TestHelper.assertExpectedResults(
makeSearchResults(
TOP_DIM,
new DateTime("2011-01-01"), "how", 1, "howdy", 2, "howwwwww", 3, "howwy", 4,
new DateTime("2011-01-02"), "how1", 1, "howdy1", 2, "howwwwww1", 3, "howwy1", 4,
new DateTime("2011-01-05"), "how2", 1, "howdy2", 2, "howwwwww2", 3, "howww2", 4,
@ -1154,6 +1157,106 @@ public class CachingClusteredClientTest
);
}
@Test
public void testSearchCachingRenamedOutput() throws Exception
{
final Druids.SearchQueryBuilder builder = Druids.newSearchQueryBuilder()
.dataSource(DATA_SOURCE)
.filters(DIM_FILTER)
.granularity(GRANULARITY)
.limit(1000)
.intervals(SEG_SPEC)
.dimensions(Arrays.asList(TOP_DIM))
.query("how")
.context(CONTEXT);
testQueryCaching(
client,
builder.build(),
new Interval("2011-01-01/2011-01-02"),
makeSearchResults(TOP_DIM, new DateTime("2011-01-01"), "how", 1, "howdy", 2, "howwwwww", 3, "howwy", 4),
new Interval("2011-01-02/2011-01-03"),
makeSearchResults(TOP_DIM, new DateTime("2011-01-02"), "how1", 1, "howdy1", 2, "howwwwww1", 3, "howwy1", 4),
new Interval("2011-01-05/2011-01-10"),
makeSearchResults(
TOP_DIM,
new DateTime("2011-01-05"), "how2", 1, "howdy2", 2, "howwwwww2", 3, "howww2", 4,
new DateTime("2011-01-06"), "how3", 1, "howdy3", 2, "howwwwww3", 3, "howww3", 4,
new DateTime("2011-01-07"), "how4", 1, "howdy4", 2, "howwwwww4", 3, "howww4", 4,
new DateTime("2011-01-08"), "how5", 1, "howdy5", 2, "howwwwww5", 3, "howww5", 4,
new DateTime("2011-01-09"), "how6", 1, "howdy6", 2, "howwwwww6", 3, "howww6", 4
),
new Interval("2011-01-05/2011-01-10"),
makeSearchResults(
TOP_DIM,
new DateTime("2011-01-05T01"), "how2", 1, "howdy2", 2, "howwwwww2", 3, "howww2", 4,
new DateTime("2011-01-06T01"), "how3", 1, "howdy3", 2, "howwwwww3", 3, "howww3", 4,
new DateTime("2011-01-07T01"), "how4", 1, "howdy4", 2, "howwwwww4", 3, "howww4", 4,
new DateTime("2011-01-08T01"), "how5", 1, "howdy5", 2, "howwwwww5", 3, "howww5", 4,
new DateTime("2011-01-09T01"), "how6", 1, "howdy6", 2, "howwwwww6", 3, "howww6", 4
)
);
QueryRunner runner = new FinalizeResultsQueryRunner(
client, new SearchQueryQueryToolChest(
new SearchQueryConfig(),
QueryRunnerTestHelper.NoopIntervalChunkingQueryRunnerDecorator()
)
);
HashMap<String, Object> context = new HashMap<String, Object>();
TestHelper.assertExpectedResults(
makeSearchResults(
TOP_DIM,
new DateTime("2011-01-01"), "how", 1, "howdy", 2, "howwwwww", 3, "howwy", 4,
new DateTime("2011-01-02"), "how1", 1, "howdy1", 2, "howwwwww1", 3, "howwy1", 4,
new DateTime("2011-01-05"), "how2", 1, "howdy2", 2, "howwwwww2", 3, "howww2", 4,
new DateTime("2011-01-05T01"), "how2", 1, "howdy2", 2, "howwwwww2", 3, "howww2", 4,
new DateTime("2011-01-06"), "how3", 1, "howdy3", 2, "howwwwww3", 3, "howww3", 4,
new DateTime("2011-01-06T01"), "how3", 1, "howdy3", 2, "howwwwww3", 3, "howww3", 4,
new DateTime("2011-01-07"), "how4", 1, "howdy4", 2, "howwwwww4", 3, "howww4", 4,
new DateTime("2011-01-07T01"), "how4", 1, "howdy4", 2, "howwwwww4", 3, "howww4", 4,
new DateTime("2011-01-08"), "how5", 1, "howdy5", 2, "howwwwww5", 3, "howww5", 4,
new DateTime("2011-01-08T01"), "how5", 1, "howdy5", 2, "howwwwww5", 3, "howww5", 4,
new DateTime("2011-01-09"), "how6", 1, "howdy6", 2, "howwwwww6", 3, "howww6", 4,
new DateTime("2011-01-09T01"), "how6", 1, "howdy6", 2, "howwwwww6", 3, "howww6", 4
),
runner.run(
builder.intervals("2011-01-01/2011-01-10")
.build(),
context
)
);
TestHelper.assertExpectedResults(
makeSearchResults(
"new_dim",
new DateTime("2011-01-01"), "how", 1, "howdy", 2, "howwwwww", 3, "howwy", 4,
new DateTime("2011-01-02"), "how1", 1, "howdy1", 2, "howwwwww1", 3, "howwy1", 4,
new DateTime("2011-01-05"), "how2", 1, "howdy2", 2, "howwwwww2", 3, "howww2", 4,
new DateTime("2011-01-05T01"), "how2", 1, "howdy2", 2, "howwwwww2", 3, "howww2", 4,
new DateTime("2011-01-06"), "how3", 1, "howdy3", 2, "howwwwww3", 3, "howww3", 4,
new DateTime("2011-01-06T01"), "how3", 1, "howdy3", 2, "howwwwww3", 3, "howww3", 4,
new DateTime("2011-01-07"), "how4", 1, "howdy4", 2, "howwwwww4", 3, "howww4", 4,
new DateTime("2011-01-07T01"), "how4", 1, "howdy4", 2, "howwwwww4", 3, "howww4", 4,
new DateTime("2011-01-08"), "how5", 1, "howdy5", 2, "howwwwww5", 3, "howww5", 4,
new DateTime("2011-01-08T01"), "how5", 1, "howdy5", 2, "howwwwww5", 3, "howww5", 4,
new DateTime("2011-01-09"), "how6", 1, "howdy6", 2, "howwwwww6", 3, "howww6", 4,
new DateTime("2011-01-09T01"), "how6", 1, "howdy6", 2, "howwwwww6", 3, "howww6", 4
),
runner.run(
builder.intervals("2011-01-01/2011-01-10")
.dimensions(new DefaultDimensionSpec(
TOP_DIM,
"new_dim"
))
.build(),
context
)
);
}
@Test
public void testSelectCaching() throws Exception
{
@ -1226,6 +1329,108 @@ public class CachingClusteredClientTest
);
}
@Test
public void testSelectCachingRenamedOutputName() throws Exception
{
final Set<String> dimensions = Sets.<String>newHashSet("a");
final Set<String> metrics = Sets.<String>newHashSet("rows");
Druids.SelectQueryBuilder builder = Druids.newSelectQueryBuilder()
.dataSource(DATA_SOURCE)
.intervals(SEG_SPEC)
.filters(DIM_FILTER)
.granularity(GRANULARITY)
.dimensions(Arrays.asList("a"))
.metrics(Arrays.asList("rows"))
.pagingSpec(new PagingSpec(null, 3))
.context(CONTEXT);
testQueryCaching(
client,
builder.build(),
new Interval("2011-01-01/2011-01-02"),
makeSelectResults(dimensions, metrics, new DateTime("2011-01-01"), ImmutableMap.of("a", "b", "rows", 1)),
new Interval("2011-01-02/2011-01-03"),
makeSelectResults(dimensions, metrics, new DateTime("2011-01-02"), ImmutableMap.of("a", "c", "rows", 5)),
new Interval("2011-01-05/2011-01-10"),
makeSelectResults(
dimensions, metrics,
new DateTime("2011-01-05"), ImmutableMap.of("a", "d", "rows", 5),
new DateTime("2011-01-06"), ImmutableMap.of("a", "e", "rows", 6),
new DateTime("2011-01-07"), ImmutableMap.of("a", "f", "rows", 7),
new DateTime("2011-01-08"), ImmutableMap.of("a", "g", "rows", 8),
new DateTime("2011-01-09"), ImmutableMap.of("a", "h", "rows", 9)
),
new Interval("2011-01-05/2011-01-10"),
makeSelectResults(
dimensions, metrics,
new DateTime("2011-01-05T01"), ImmutableMap.of("a", "d", "rows", 5),
new DateTime("2011-01-06T01"), ImmutableMap.of("a", "e", "rows", 6),
new DateTime("2011-01-07T01"), ImmutableMap.of("a", "f", "rows", 7),
new DateTime("2011-01-08T01"), ImmutableMap.of("a", "g", "rows", 8),
new DateTime("2011-01-09T01"), ImmutableMap.of("a", "h", "rows", 9)
)
);
QueryRunner runner = new FinalizeResultsQueryRunner(
client,
new SelectQueryQueryToolChest(
jsonMapper,
QueryRunnerTestHelper.NoopIntervalChunkingQueryRunnerDecorator()
)
);
HashMap<String, Object> context = new HashMap<String, Object>();
TestHelper.assertExpectedResults(
makeSelectResults(
dimensions, metrics,
new DateTime("2011-01-01"), ImmutableMap.of("a", "b", "rows", 1),
new DateTime("2011-01-02"), ImmutableMap.of("a", "c", "rows", 5),
new DateTime("2011-01-05"), ImmutableMap.of("a", "d", "rows", 5),
new DateTime("2011-01-05T01"), ImmutableMap.of("a", "d", "rows", 5),
new DateTime("2011-01-06"), ImmutableMap.of("a", "e", "rows", 6),
new DateTime("2011-01-06T01"), ImmutableMap.of("a", "e", "rows", 6),
new DateTime("2011-01-07"), ImmutableMap.of("a", "f", "rows", 7),
new DateTime("2011-01-07T01"), ImmutableMap.of("a", "f", "rows", 7),
new DateTime("2011-01-08"), ImmutableMap.of("a", "g", "rows", 8),
new DateTime("2011-01-08T01"), ImmutableMap.of("a", "g", "rows", 8),
new DateTime("2011-01-09"), ImmutableMap.of("a", "h", "rows", 9),
new DateTime("2011-01-09T01"), ImmutableMap.of("a", "h", "rows", 9)
),
runner.run(
builder.intervals("2011-01-01/2011-01-10")
.build(),
context
)
);
TestHelper.assertExpectedResults(
makeSelectResults(
dimensions, metrics,
new DateTime("2011-01-01"), ImmutableMap.of("a2", "b", "rows", 1),
new DateTime("2011-01-02"), ImmutableMap.of("a2", "c", "rows", 5),
new DateTime("2011-01-05"), ImmutableMap.of("a2", "d", "rows", 5),
new DateTime("2011-01-05T01"), ImmutableMap.of("a2", "d", "rows", 5),
new DateTime("2011-01-06"), ImmutableMap.of("a2", "e", "rows", 6),
new DateTime("2011-01-06T01"), ImmutableMap.of("a2", "e", "rows", 6),
new DateTime("2011-01-07"), ImmutableMap.of("a2", "f", "rows", 7),
new DateTime("2011-01-07T01"), ImmutableMap.of("a2", "f", "rows", 7),
new DateTime("2011-01-08"), ImmutableMap.of("a2", "g", "rows", 8),
new DateTime("2011-01-08T01"), ImmutableMap.of("a2", "g", "rows", 8),
new DateTime("2011-01-09"), ImmutableMap.of("a2", "h", "rows", 9),
new DateTime("2011-01-09T01"), ImmutableMap.of("a2", "h", "rows", 9)
),
runner.run(
builder.intervals("2011-01-01/2011-01-10")
.dimensionSpecs(Lists.<DimensionSpec>newArrayList(new DefaultDimensionSpec("a", "a2")))
.build(),
context
)
);
}
@Test
public void testGroupByCaching() throws Exception
{
@ -2411,7 +2616,7 @@ public class CachingClusteredClientTest
}
private Iterable<Result<SearchResultValue>> makeSearchResults
(Object... objects)
(String dim, Object... objects)
{
List<Result<SearchResultValue>> retVal = Lists.newArrayList();
int index = 0;
@ -2420,7 +2625,7 @@ public class CachingClusteredClientTest
List<SearchHit> values = Lists.newArrayList();
while (index < objects.length && !(objects[index] instanceof DateTime)) {
values.add(new SearchHit(TOP_DIM, objects[index++].toString(), (Integer) objects[index++]));
values.add(new SearchHit(dim, objects[index++].toString(), (Integer) objects[index++]));
}
retVal.add(new Result<>(timestamp, new SearchResultValue(values)));