Add option for select query to get next page without modifying returned paging identifiers

This commit is contained in:
navis.ryu 2016-03-02 13:39:00 +09:00
parent 2fc5918e69
commit 29bb00535b
5 changed files with 262 additions and 202 deletions

View File

@ -167,5 +167,15 @@ This can be used with the next query's pagingSpec:
"pagingSpec":{"pagingIdentifiers": {"wikipedia_2012-12-29T00:00:00.000Z_2013-01-10T08:00:00.000Z_2013-01-10T08:13:47.830Z_v9" : 5}, "threshold":5}
}
```
Note that in the second query, an offset is specified and that it is 1 greater than the largest offset found in the initial results. To return the next "page", this offset must be incremented by 1 (should be decremented by 1 for descending query), with each new query. When an empty results set is received, the very last page has been returned.
Note that in the second query, an offset is specified and that it is 1 greater than the largest offset found in the initial results. To return the next "page", this offset must be incremented by 1 (should be decremented by 1 for a descending query) with each new query. However, with the option `fromNext` enabled, this manual adjustment is not needed. When an empty results set is received, the very last page has been returned.
The `fromNext` option is specified in the pagingSpec:
```json
{
...
"pagingSpec":{"pagingIdentifiers": {}, "threshold":5, "fromNext": true}
}
```

View File

@ -21,28 +21,60 @@ package io.druid.query.select;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.collect.Maps;
import com.google.common.primitives.Ints;
import com.metamx.common.StringUtils;
import java.nio.ByteBuffer;
import java.util.LinkedHashMap;
import java.util.Map;
/**
*/
public class PagingSpec
{
private final LinkedHashMap<String, Integer> pagingIdentifiers;
public static PagingSpec newSpec(int threshold)
{
return new PagingSpec(null, threshold);
}
public static Map<String, Integer> merge(Iterable<Map<String, Integer>> cursors)
{
Map<String, Integer> next = Maps.newHashMap();
for (Map<String, Integer> cursor : cursors) {
for (Map.Entry<String, Integer> entry : cursor.entrySet()) {
next.put(entry.getKey(), entry.getValue());
}
}
return next;
}
public static Map<String, Integer> next(Map<String, Integer> cursor, boolean descending)
{
for (Map.Entry<String, Integer> entry : cursor.entrySet()) {
entry.setValue(descending ? entry.getValue() - 1 : entry.getValue() + 1);
}
return cursor;
}
private final Map<String, Integer> pagingIdentifiers;
private final int threshold;
private final boolean fromNext;
@JsonCreator
public PagingSpec(
@JsonProperty("pagingIdentifiers") LinkedHashMap<String, Integer> pagingIdentifiers,
@JsonProperty("threshold") int threshold
@JsonProperty("pagingIdentifiers") Map<String, Integer> pagingIdentifiers,
@JsonProperty("threshold") int threshold,
@JsonProperty("fromNext") boolean fromNext
)
{
this.pagingIdentifiers = pagingIdentifiers == null ? new LinkedHashMap<String, Integer>() : pagingIdentifiers;
this.pagingIdentifiers = pagingIdentifiers == null ? Maps.<String, Integer>newHashMap() : pagingIdentifiers;
this.threshold = threshold;
this.fromNext = fromNext;
}
public PagingSpec(Map<String, Integer> pagingIdentifiers, int threshold)
{
this(pagingIdentifiers, threshold, false);
}
@JsonProperty
@ -57,6 +89,12 @@ public class PagingSpec
return threshold;
}
@JsonProperty
public boolean isFromNext()
{
return fromNext;
}
public byte[] getCacheKey()
{
final byte[][] pagingKeys = new byte[pagingIdentifiers.size()][];
@ -75,7 +113,7 @@ public class PagingSpec
final byte[] thresholdBytes = ByteBuffer.allocate(Ints.BYTES).putInt(threshold).array();
final ByteBuffer queryCacheKey = ByteBuffer.allocate(pagingKeysSize + pagingValuesSize + thresholdBytes.length);
final ByteBuffer queryCacheKey = ByteBuffer.allocate(pagingKeysSize + pagingValuesSize + thresholdBytes.length + 1);
for (byte[] pagingKey : pagingKeys) {
queryCacheKey.put(pagingKey);
@ -86,22 +124,37 @@ public class PagingSpec
}
queryCacheKey.put(thresholdBytes);
queryCacheKey.put(isFromNext() ? (byte) 0x01 : 0x00);
return queryCacheKey.array();
}
public PagingOffset getOffset(String identifier, boolean descending)
{
Integer offset = pagingIdentifiers.get(identifier);
if (offset == null) {
offset = PagingOffset.toOffset(0, descending);
} else if (fromNext) {
offset = descending ? offset - 1 : offset + 1;
}
return PagingOffset.of(offset, threshold);
}
@Override
public boolean equals(Object o)
{
if (this == o) {
return true;
}
if (!(o instanceof PagingSpec)) {
if (o == null || getClass() != o.getClass()) {
return false;
}
PagingSpec that = (PagingSpec) o;
if (fromNext != that.fromNext) {
return false;
}
if (threshold != that.threshold) {
return false;
}
@ -117,6 +170,7 @@ public class PagingSpec
{
int result = pagingIdentifiers.hashCode();
result = 31 * result + threshold;
result = 31 * result + (fromNext ? 1 : 0);
return result;
}
@ -126,16 +180,7 @@ public class PagingSpec
return "PagingSpec{" +
"pagingIdentifiers=" + pagingIdentifiers +
", threshold=" + threshold +
", fromNext=" + fromNext +
'}';
}
public PagingOffset getOffset(String identifier, boolean descending)
{
Integer offset = pagingIdentifiers.get(identifier);
if (offset == null) {
offset = PagingOffset.toOffset(0, descending);
}
return PagingOffset.of(offset, threshold);
}
}

View File

@ -21,7 +21,6 @@ package io.druid.query.select;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.io.CharSource;
import com.metamx.common.guava.Sequences;
import io.druid.granularity.QueryGranularity;
@ -53,14 +52,17 @@ import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import java.io.IOException;
import java.util.LinkedHashMap;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
/**
*/
@RunWith(Parameterized.class)
public class MultiSegmentSelectQueryTest
{
private static final SelectQueryQueryToolChest toolChest = new SelectQueryQueryToolChest(
@ -188,51 +190,52 @@ public class MultiSegmentSelectQueryTest
IOUtils.closeQuietly(segment_override);
}
private final Druids.SelectQueryBuilder builder =
Druids.newSelectQueryBuilder()
@Parameterized.Parameters(name = "fromNext={0}")
public static Iterable<Object[]> constructorFeeder() throws IOException
{
return QueryRunnerTestHelper.cartesian(Arrays.asList(false, true));
}
private final boolean fromNext;
public MultiSegmentSelectQueryTest(boolean fromNext)
{
this.fromNext = fromNext;
}
private Druids.SelectQueryBuilder newBuilder()
{
return Druids.newSelectQueryBuilder()
.dataSource(new TableDataSource(QueryRunnerTestHelper.dataSource))
.intervals(SelectQueryRunnerTest.I_0112_0114)
.granularity(QueryRunnerTestHelper.allGran)
.dimensionSpecs(DefaultDimensionSpec.toSpec(QueryRunnerTestHelper.dimensions))
.pagingSpec(new PagingSpec(null, 3));
.pagingSpec(PagingSpec.newSpec(3));
}
@Test
public void testAllGranularityAscending()
public void testAllGranularity()
{
SelectQuery query = builder.build();
for (int[] expected : new int[][]{
runAllGranularityTest(
newBuilder().build(),
new int[][]{
{2, -1, -1, -1, 3}, {3, 1, -1, -1, 3}, {-1, 3, 0, -1, 3}, {-1, -1, 3, -1, 3}, {-1, -1, 4, 1, 3},
{-1, -1, -1, 4, 3}, {-1, -1, -1, 7, 3}, {-1, -1, -1, 10, 3}, {-1, -1, -1, 12, 2}, {-1, -1, -1, 13, 0}
}) {
List<Result<SelectResultValue>> results = Sequences.toList(
runner.run(query, ImmutableMap.of()),
Lists.<Result<SelectResultValue>>newArrayList()
}
);
Assert.assertEquals(1, results.size());
SelectResultValue value = results.get(0).getValue();
Map<String, Integer> pagingIdentifiers = value.getPagingIdentifiers();
for (int i = 0; i < expected.length - 1; i++) {
if (expected[i] >= 0) {
Assert.assertEquals(expected[i], pagingIdentifiers.get(segmentIdentifiers.get(i)).intValue());
}
}
Assert.assertEquals(expected[expected.length - 1], value.getEvents().size());
query = query.withPagingSpec(toNextPager(3, query.isDescending(), pagingIdentifiers));
}
}
@Test
public void testAllGranularityDescending()
{
SelectQuery query = builder.descending(true).build();
for (int[] expected : new int[][]{
runAllGranularityTest(
newBuilder().descending(true).build(),
new int[][]{
{0, 0, 0, -3, 3}, {0, 0, 0, -6, 3}, {0, 0, 0, -9, 3}, {0, 0, 0, -12, 3}, {0, 0, -2, -13, 3},
{0, 0, -5, 0, 3}, {0, -3, 0, 0, 3}, {-2, -4, 0, 0, 3}, {-4, 0, 0, 0, 2}, {-5, 0, 0, 0, 0}
}) {
}
);
}
private void runAllGranularityTest(SelectQuery query, int[][] expectedOffsets)
{
for (int[] expected : expectedOffsets) {
List<Result<SelectResultValue>> results = Sequences.toList(
runner.run(query, ImmutableMap.of()),
Lists.<Result<SelectResultValue>>newArrayList()
@ -242,59 +245,44 @@ public class MultiSegmentSelectQueryTest
SelectResultValue value = results.get(0).getValue();
Map<String, Integer> pagingIdentifiers = value.getPagingIdentifiers();
for (int i = 0; i < expected.length - 1; i++) {
if (expected[i] < 0) {
Assert.assertEquals(expected[i], pagingIdentifiers.get(segmentIdentifiers.get(i)).intValue());
}
}
Assert.assertEquals(expected[expected.length - 1], value.getEvents().size());
Map<String, Integer> merged = PagingSpec.merge(Arrays.asList(pagingIdentifiers));
query = query.withPagingSpec(toNextPager(3, query.isDescending(), pagingIdentifiers));
for (int i = 0; i < 4; i++) {
if (query.isDescending() ^ expected[i] >= 0) {
Assert.assertEquals(
expected[i], pagingIdentifiers.get(segmentIdentifiers.get(i)).intValue()
);
}
}
Assert.assertEquals(expected[4], value.getEvents().size());
query = query.withPagingSpec(toNextCursor(merged, query, 3));
}
}
@Test
public void testDayGranularityAscending()
public void testDayGranularity()
{
SelectQuery query = builder.granularity(QueryRunnerTestHelper.dayGran).build();
for (int[] expected : new int[][]{
runDayGranularityTest(
newBuilder().granularity(QueryRunnerTestHelper.dayGran).build(),
new int[][]{
{2, -1, -1, 2, 3, 0, 0, 3}, {3, 1, -1, 5, 1, 2, 0, 3}, {-1, 3, 0, 8, 0, 2, 1, 3},
{-1, -1, 3, 11, 0, 0, 3, 3}, {-1, -1, 4, 12, 0, 0, 1, 1}, {-1, -1, 5, 13, 0, 0, 0, 0}
}) {
List<Result<SelectResultValue>> results = Sequences.toList(
runner.run(query, ImmutableMap.of()),
Lists.<Result<SelectResultValue>>newArrayList()
}
);
Assert.assertEquals(2, results.size());
SelectResultValue value0 = results.get(0).getValue();
SelectResultValue value1 = results.get(1).getValue();
Map<String, Integer> pagingIdentifiers0 = value0.getPagingIdentifiers();
Map<String, Integer> pagingIdentifiers1 = value1.getPagingIdentifiers();
for (int i = 0; i < 4; i++) {
if (expected[i] >= 0) {
Map<String, Integer> paging = i < 3 ? pagingIdentifiers0 : pagingIdentifiers1;
Assert.assertEquals(expected[i], paging.get(segmentIdentifiers.get(i)).intValue());
}
}
query = query.withPagingSpec(toNextPager(3, query.isDescending(), pagingIdentifiers0, pagingIdentifiers1));
}
}
@Test
public void testDayGranularityDescending()
{
QueryGranularity granularity = QueryRunnerTestHelper.dayGran;
SelectQuery query = builder.granularity(granularity).descending(true).build();
for (int[] expected : new int[][]{
runDayGranularityTest(
newBuilder().granularity(QueryRunnerTestHelper.dayGran).descending(true).build(),
new int[][]{
{0, 0, -3, -3, 0, 0, 3, 3}, {0, -1, -5, -6, 0, 1, 2, 3}, {0, -4, 0, -9, 0, 3, 0, 3},
{-3, 0, 0, -12, 3, 0, 0, 3}, {-4, 0, 0, -13, 1, 0, 0, 1}, {-5, 0, 0, -14, 0, 0, 0, 0}
}) {
}
);
}
private void runDayGranularityTest(SelectQuery query, int[][] expectedOffsets)
{
for (int[] expected : expectedOffsets) {
List<Result<SelectResultValue>> results = Sequences.toList(
runner.run(query, ImmutableMap.of()),
Lists.<Result<SelectResultValue>>newArrayList()
@ -307,26 +295,23 @@ public class MultiSegmentSelectQueryTest
Map<String, Integer> pagingIdentifiers0 = value0.getPagingIdentifiers();
Map<String, Integer> pagingIdentifiers1 = value1.getPagingIdentifiers();
Map<String, Integer> merged = PagingSpec.merge(Arrays.asList(pagingIdentifiers0, pagingIdentifiers1));
for (int i = 0; i < 4; i++) {
if (expected[i] < 0) {
Map<String, Integer> paging = i < 3 ? pagingIdentifiers1 : pagingIdentifiers0;
Assert.assertEquals(expected[i], paging.get(segmentIdentifiers.get(i)).intValue());
if (query.isDescending() ^ expected[i] >= 0) {
Assert.assertEquals(expected[i], merged.get(segmentIdentifiers.get(i)).intValue());
}
}
query = query.withPagingSpec(toNextPager(3, query.isDescending(), pagingIdentifiers0, pagingIdentifiers1));
query = query.withPagingSpec(toNextCursor(merged, query, 3));
}
}
@SafeVarargs
private final PagingSpec toNextPager(int threshold, boolean descending, Map<String, Integer>... pagers)
private PagingSpec toNextCursor(Map<String, Integer> merged, SelectQuery query, int threshold)
{
LinkedHashMap<String, Integer> next = Maps.newLinkedHashMap();
for (Map<String, Integer> pager : pagers) {
for (Map.Entry<String, Integer> entry : pager.entrySet()) {
next.put(entry.getKey(), descending ? entry.getValue() - 1 : entry.getValue() + 1);
if (!fromNext) {
merged = PagingSpec.next(merged, query.isDescending());
}
}
return new PagingSpec(next, threshold);
return new PagingSpec(merged, threshold, fromNext);
}
}

View File

@ -28,6 +28,7 @@ import com.google.common.collect.ObjectArrays;
import com.metamx.common.ISE;
import com.metamx.common.guava.Sequences;
import io.druid.jackson.DefaultObjectMapper;
import io.druid.query.Druids;
import io.druid.query.QueryRunner;
import io.druid.query.QueryRunnerTestHelper;
import io.druid.query.Result;
@ -53,7 +54,6 @@ import java.io.IOException;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
@ -126,20 +126,24 @@ public class SelectQueryRunnerTest
this.descending = descending;
}
private Druids.SelectQueryBuilder newTestQuery() {
return Druids.newSelectQueryBuilder()
.dataSource(new TableDataSource(QueryRunnerTestHelper.dataSource))
.dimensionSpecs(DefaultDimensionSpec.toSpec(Arrays.<String>asList()))
.metrics(Arrays.<String>asList())
.intervals(QueryRunnerTestHelper.fullOnInterval)
.granularity(QueryRunnerTestHelper.allGran)
.pagingSpec(PagingSpec.newSpec(3))
.descending(descending);
}
@Test
public void testFullOnSelect()
{
SelectQuery query = new SelectQuery(
new TableDataSource(QueryRunnerTestHelper.dataSource),
I_0112_0114,
descending,
null,
QueryRunnerTestHelper.allGran,
DefaultDimensionSpec.toSpec(Arrays.<String>asList()),
Arrays.<String>asList(),
new PagingSpec(null, 3),
null
);
SelectQuery query = newTestQuery()
.intervals(I_0112_0114)
.build();
HashMap<String, Object> context = new HashMap<String, Object>();
Iterable<Result<SelectResultValue>> results = Sequences.toList(
runner.run(query, context),
@ -155,6 +159,48 @@ public class SelectQueryRunnerTest
verify(expectedResults, results);
}
@Test
public void testSequentialPaging()
{
int[] asc = {2, 5, 8, 11, 14, 17, 20, 23, 25};
int[] dsc = {-3, -6, -9, -12, -15, -18, -21, -24, -26};
int[] expected = descending ? dsc : asc;
SelectQuery query = newTestQuery().intervals(I_0112_0114).build();
for (int offset : expected) {
List<Result<SelectResultValue>> results = Sequences.toList(
runner.run(query, ImmutableMap.of()),
Lists.<Result<SelectResultValue>>newArrayList()
);
Assert.assertEquals(1, results.size());
SelectResultValue result = results.get(0).getValue();
Map<String, Integer> pagingIdentifiers = result.getPagingIdentifiers();
Assert.assertEquals(offset, pagingIdentifiers.get(QueryRunnerTestHelper.segmentId).intValue());
Map<String, Integer> next = PagingSpec.next(pagingIdentifiers, descending);
query = query.withPagingSpec(new PagingSpec(next, 3));
}
query = newTestQuery().intervals(I_0112_0114).build();
for (int offset : expected) {
List<Result<SelectResultValue>> results = Sequences.toList(
runner.run(query, ImmutableMap.of()),
Lists.<Result<SelectResultValue>>newArrayList()
);
Assert.assertEquals(1, results.size());
SelectResultValue result = results.get(0).getValue();
Map<String, Integer> pagingIdentifiers = result.getPagingIdentifiers();
Assert.assertEquals(offset, pagingIdentifiers.get(QueryRunnerTestHelper.segmentId).intValue());
// use identifier as-is but with fromNext=true
query = query.withPagingSpec(new PagingSpec(pagingIdentifiers, 3, true));
}
}
@Test
public void testFullOnSelectWithDimensionSpec()
{
@ -169,12 +215,8 @@ public class SelectQueryRunnerTest
map.put("technology", "technology0");
map.put("travel", "travel0");
SelectQuery query = new SelectQuery(
new TableDataSource(QueryRunnerTestHelper.dataSource),
QueryRunnerTestHelper.fullOnInterval,
descending,
null,
QueryRunnerTestHelper.allGran,
SelectQuery query = newTestQuery()
.dimensionSpecs(
Arrays.<DimensionSpec>asList(
new DefaultDimensionSpec(QueryRunnerTestHelper.marketDimension, "mar"),
new ExtractionDimensionSpec(
@ -183,9 +225,10 @@ public class SelectQueryRunnerTest
new LookupExtractionFn(new MapLookupExtractor(map, true), false, null, true, false)
),
new DefaultDimensionSpec(QueryRunnerTestHelper.placementDimension, "place")
), Lists.<String>newArrayList(), new PagingSpec(null, 3),
null
);
)
)
.build();
HashMap<String, Object> context = new HashMap<String, Object>();
Iterable<Result<SelectResultValue>> results = Sequences.toList(
runner.run(query, context),
@ -286,17 +329,12 @@ public class SelectQueryRunnerTest
@Test
public void testSelectWithDimsAndMets()
{
SelectQuery query = new SelectQuery(
new TableDataSource(QueryRunnerTestHelper.dataSource),
I_0112_0114,
descending,
null,
QueryRunnerTestHelper.allGran,
DefaultDimensionSpec.toSpec(Arrays.asList(QueryRunnerTestHelper.marketDimension)),
Arrays.asList(QueryRunnerTestHelper.indexMetric),
new PagingSpec(null, 3),
null
);
SelectQuery query = newTestQuery()
.intervals(I_0112_0114)
.dimensionSpecs(DefaultDimensionSpec.toSpec(QueryRunnerTestHelper.marketDimension))
.metrics(Arrays.asList(QueryRunnerTestHelper.indexMetric))
.build();
HashMap<String, Object> context = new HashMap<String, Object>();
Iterable<Result<SelectResultValue>> results = Sequences.toList(
runner.run(query, context),
@ -325,17 +363,12 @@ public class SelectQueryRunnerTest
@Test
public void testSelectPagination()
{
SelectQuery query = new SelectQuery(
new TableDataSource(QueryRunnerTestHelper.dataSource),
I_0112_0114,
descending,
null,
QueryRunnerTestHelper.allGran,
DefaultDimensionSpec.toSpec(Arrays.asList(QueryRunnerTestHelper.qualityDimension)),
Arrays.asList(QueryRunnerTestHelper.indexMetric),
new PagingSpec(toPagingIdentifier(3, descending), 3),
null
);
SelectQuery query = newTestQuery()
.intervals(I_0112_0114)
.dimensionSpecs(DefaultDimensionSpec.toSpec(QueryRunnerTestHelper.qualityDimension))
.metrics(Arrays.asList(QueryRunnerTestHelper.indexMetric))
.pagingSpec(new PagingSpec(toPagingIdentifier(3, descending), 3))
.build();
Iterable<Result<SelectResultValue>> results = Sequences.toList(
runner.run(query, Maps.newHashMap()),
@ -363,17 +396,15 @@ public class SelectQueryRunnerTest
{
// startDelta + threshold pairs
for (int[] param : new int[][]{{3, 3}, {0, 1}, {5, 5}, {2, 7}, {3, 0}}) {
SelectQuery query = new SelectQuery(
new TableDataSource(QueryRunnerTestHelper.dataSource),
I_0112_0114,
descending,
new SelectorDimFilter(QueryRunnerTestHelper.marketDimension, "spot"),
QueryRunnerTestHelper.dayGran,
DefaultDimensionSpec.toSpec(Lists.<String>newArrayList(QueryRunnerTestHelper.qualityDimension)),
Lists.<String>newArrayList(QueryRunnerTestHelper.indexMetric),
new PagingSpec(toPagingIdentifier(param[0], descending), param[1]),
null
);
SelectQuery query = newTestQuery()
.intervals(I_0112_0114)
.filters(new SelectorDimFilter(QueryRunnerTestHelper.marketDimension, "spot"))
.granularity(QueryRunnerTestHelper.dayGran)
.dimensionSpecs(DefaultDimensionSpec.toSpec(QueryRunnerTestHelper.qualityDimension))
.metrics(Lists.<String>newArrayList(QueryRunnerTestHelper.indexMetric))
.pagingSpec(new PagingSpec(toPagingIdentifier(param[0], descending), param[1]))
.build();
HashMap<String, Object> context = new HashMap<String, Object>();
Iterable<Result<SelectResultValue>> results = Sequences.toList(
runner.run(query, context),
@ -427,20 +458,17 @@ public class SelectQueryRunnerTest
@Test
public void testFullSelectNoResults()
{
SelectQuery query = new SelectQuery(
new TableDataSource(QueryRunnerTestHelper.dataSource),
I_0112_0114,
descending,
SelectQuery query = newTestQuery()
.intervals(I_0112_0114)
.filters(
new AndDimFilter(
Arrays.<DimFilter>asList(
new SelectorDimFilter(QueryRunnerTestHelper.marketDimension, "spot"),
new SelectorDimFilter(QueryRunnerTestHelper.marketDimension, "foo")
)
),
QueryRunnerTestHelper.allGran,
DefaultDimensionSpec.toSpec(Lists.<String>newArrayList()), Lists.<String>newArrayList(), new PagingSpec(null, 3),
null
);
)
)
.build();
Iterable<Result<SelectResultValue>> results = Sequences.toList(
runner.run(query, Maps.newHashMap()),
@ -463,17 +491,11 @@ public class SelectQueryRunnerTest
@Test
public void testFullSelectNoDimensionAndMetric()
{
SelectQuery query = new SelectQuery(
new TableDataSource(QueryRunnerTestHelper.dataSource),
I_0112_0114,
descending,
null,
QueryRunnerTestHelper.allGran,
DefaultDimensionSpec.toSpec(Lists.<String>newArrayList("foo")),
Lists.<String>newArrayList("foo2"),
new PagingSpec(null, 3),
null
);
SelectQuery query = newTestQuery()
.intervals(I_0112_0114)
.dimensionSpecs(DefaultDimensionSpec.toSpec("foo"))
.metrics(Lists.<String>newArrayList("foo2"))
.build();
Iterable<Result<SelectResultValue>> results = Sequences.toList(
runner.run(query, Maps.newHashMap()),
@ -498,13 +520,11 @@ public class SelectQueryRunnerTest
verify(expectedResults, results);
}
private LinkedHashMap<String, Integer> toPagingIdentifier(int startDelta, boolean descending)
private Map<String, Integer> toPagingIdentifier(int startDelta, boolean descending)
{
return Maps.newLinkedHashMap(
ImmutableMap.of(
return ImmutableMap.of(
QueryRunnerTestHelper.segmentId,
PagingOffset.toOffset(startDelta, descending)
)
);
}

View File

@ -59,7 +59,7 @@ public class SelectQuerySpecTest
+ "\"granularity\":{\"type\":\"all\"},"
+ "\"dimensions\":[{\"type\":\"default\",\"dimension\":\"market\",\"outputName\":\"market\"},{\"type\":\"default\",\"dimension\":\"quality\",\"outputName\":\"quality\"}],"
+ "\"metrics\":[\"index\"],"
+ "\"pagingSpec\":{\"pagingIdentifiers\":{},\"threshold\":3},"
+ "\"pagingSpec\":{\"pagingIdentifiers\":{},\"threshold\":3,\"fromNext\":false},"
+ "\"context\":null}";
SelectQuery query = new SelectQuery(