diff --git a/docs/content/querying/select-query.md b/docs/content/querying/select-query.md index 75eaf310eaa..274da89cc9f 100644 --- a/docs/content/querying/select-query.md +++ b/docs/content/querying/select-query.md @@ -167,5 +167,15 @@ This can be used with the next query's pagingSpec: "pagingSpec":{"pagingIdentifiers": {"wikipedia_2012-12-29T00:00:00.000Z_2013-01-10T08:00:00.000Z_2013-01-10T08:13:47.830Z_v9" : 5}, "threshold":5} } +``` -Note that in the second query, an offset is specified and that it is 1 greater than the largest offset found in the initial results. To return the next "page", this offset must be incremented by 1 (should be decremented by 1 for descending query), with each new query. When an empty results set is received, the very last page has been returned. +Note that in the second query, an offset is specified and that it is 1 greater than the largest offset found in the initial results. To return the next "page", this offset must be incremented by 1 (should be decremented by 1 for descending query), with each new query, but with option `fromNext` enabled, this operation is not needed. When an empty results set is received, the very last page has been returned. + +The `fromNext` option is in pagingSpec: + +```json + { + ... 
+ "pagingSpec":{"pagingIdentifiers": {}, "threshold":5, "fromNext": true} + } +``` diff --git a/processing/src/main/java/io/druid/query/select/PagingSpec.java b/processing/src/main/java/io/druid/query/select/PagingSpec.java index 38d5f141240..3cc821645eb 100644 --- a/processing/src/main/java/io/druid/query/select/PagingSpec.java +++ b/processing/src/main/java/io/druid/query/select/PagingSpec.java @@ -21,28 +21,60 @@ package io.druid.query.select; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; +import com.google.common.collect.Maps; import com.google.common.primitives.Ints; import com.metamx.common.StringUtils; import java.nio.ByteBuffer; -import java.util.LinkedHashMap; import java.util.Map; /** */ public class PagingSpec { - private final LinkedHashMap pagingIdentifiers; + public static PagingSpec newSpec(int threshold) + { + return new PagingSpec(null, threshold); + } + + public static Map merge(Iterable> cursors) + { + Map next = Maps.newHashMap(); + for (Map cursor : cursors) { + for (Map.Entry entry : cursor.entrySet()) { + next.put(entry.getKey(), entry.getValue()); + } + } + return next; + } + + public static Map next(Map cursor, boolean descending) + { + for (Map.Entry entry : cursor.entrySet()) { + entry.setValue(descending ? entry.getValue() - 1 : entry.getValue() + 1); + } + return cursor; + } + + private final Map pagingIdentifiers; private final int threshold; + private final boolean fromNext; @JsonCreator public PagingSpec( - @JsonProperty("pagingIdentifiers") LinkedHashMap pagingIdentifiers, - @JsonProperty("threshold") int threshold + @JsonProperty("pagingIdentifiers") Map pagingIdentifiers, + @JsonProperty("threshold") int threshold, + @JsonProperty("fromNext") boolean fromNext ) { - this.pagingIdentifiers = pagingIdentifiers == null ? new LinkedHashMap() : pagingIdentifiers; + this.pagingIdentifiers = pagingIdentifiers == null ? 
Maps.newHashMap() : pagingIdentifiers; this.threshold = threshold; + this.fromNext = fromNext; + } + + public PagingSpec(Map<String, Integer> pagingIdentifiers, int threshold) + { + this(pagingIdentifiers, threshold, false); } @JsonProperty @@ -57,6 +89,12 @@ public class PagingSpec return threshold; } + @JsonProperty + public boolean isFromNext() + { + return fromNext; + } + public byte[] getCacheKey() { final byte[][] pagingKeys = new byte[pagingIdentifiers.size()][]; @@ -75,7 +113,7 @@ public class PagingSpec final byte[] thresholdBytes = ByteBuffer.allocate(Ints.BYTES).putInt(threshold).array(); - final ByteBuffer queryCacheKey = ByteBuffer.allocate(pagingKeysSize + pagingValuesSize + thresholdBytes.length); + final ByteBuffer queryCacheKey = ByteBuffer.allocate(pagingKeysSize + pagingValuesSize + thresholdBytes.length + 1); for (byte[] pagingKey : pagingKeys) { queryCacheKey.put(pagingKey); @@ -86,22 +124,37 @@ public class PagingSpec } queryCacheKey.put(thresholdBytes); + queryCacheKey.put(isFromNext() ? (byte) 0x01 : 0x00); return queryCacheKey.array(); } + public PagingOffset getOffset(String identifier, boolean descending) + { + Integer offset = pagingIdentifiers.get(identifier); + if (offset == null) { + offset = PagingOffset.toOffset(0, descending); + } else if (fromNext) { + offset = descending ? offset - 1 : offset + 1; + } + return PagingOffset.of(offset, threshold); + } + @Override public boolean equals(Object o) { if (this == o) { return true; } - if (!(o instanceof PagingSpec)) { + if (o == null || getClass() != o.getClass()) { return false; } PagingSpec that = (PagingSpec) o; + if (fromNext != that.fromNext) { + return false; + } if (threshold != that.threshold) { return false; } @@ -117,6 +170,7 @@ public class PagingSpec { int result = pagingIdentifiers.hashCode(); result = 31 * result + threshold; + result = 31 * result + (fromNext ? 
1 : 0); return result; } @@ -126,16 +180,7 @@ public class PagingSpec return "PagingSpec{" + "pagingIdentifiers=" + pagingIdentifiers + ", threshold=" + threshold + + ", fromNext=" + fromNext + '}'; } - - public PagingOffset getOffset(String identifier, boolean descending) - { - Integer offset = pagingIdentifiers.get(identifier); - if (offset == null) { - offset = PagingOffset.toOffset(0, descending); - } - return PagingOffset.of(offset, threshold); - } - } diff --git a/processing/src/test/java/io/druid/query/select/MultiSegmentSelectQueryTest.java b/processing/src/test/java/io/druid/query/select/MultiSegmentSelectQueryTest.java index ee7193f5b23..8b0aaae6ced 100644 --- a/processing/src/test/java/io/druid/query/select/MultiSegmentSelectQueryTest.java +++ b/processing/src/test/java/io/druid/query/select/MultiSegmentSelectQueryTest.java @@ -21,7 +21,6 @@ package io.druid.query.select; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; -import com.google.common.collect.Maps; import com.google.common.io.CharSource; import com.metamx.common.guava.Sequences; import io.druid.granularity.QueryGranularity; @@ -53,14 +52,17 @@ import org.junit.AfterClass; import org.junit.Assert; import org.junit.BeforeClass; import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; import java.io.IOException; -import java.util.LinkedHashMap; +import java.util.Arrays; import java.util.List; import java.util.Map; /** */ +@RunWith(Parameterized.class) public class MultiSegmentSelectQueryTest { private static final SelectQueryQueryToolChest toolChest = new SelectQueryQueryToolChest( @@ -188,51 +190,52 @@ public class MultiSegmentSelectQueryTest IOUtils.closeQuietly(segment_override); } - private final Druids.SelectQueryBuilder builder = - Druids.newSelectQueryBuilder() - .dataSource(new TableDataSource(QueryRunnerTestHelper.dataSource)) - .intervals(SelectQueryRunnerTest.I_0112_0114) - 
.granularity(QueryRunnerTestHelper.allGran) - .dimensionSpecs(DefaultDimensionSpec.toSpec(QueryRunnerTestHelper.dimensions)) - .pagingSpec(new PagingSpec(null, 3)); - - @Test - public void testAllGranularityAscending() + @Parameterized.Parameters(name = "fromNext={0}") + public static Iterable constructorFeeder() throws IOException { - SelectQuery query = builder.build(); + return QueryRunnerTestHelper.cartesian(Arrays.asList(false, true)); + } - for (int[] expected : new int[][]{ - {2, -1, -1, -1, 3}, {3, 1, -1, -1, 3}, {-1, 3, 0, -1, 3}, {-1, -1, 3, -1, 3}, {-1, -1, 4, 1, 3}, - {-1, -1, -1, 4, 3}, {-1, -1, -1, 7, 3}, {-1, -1, -1, 10, 3}, {-1, -1, -1, 12, 2}, {-1, -1, -1, 13, 0} - }) { - List> results = Sequences.toList( - runner.run(query, ImmutableMap.of()), - Lists.>newArrayList() - ); - Assert.assertEquals(1, results.size()); + private final boolean fromNext; - SelectResultValue value = results.get(0).getValue(); - Map pagingIdentifiers = value.getPagingIdentifiers(); - for (int i = 0; i < expected.length - 1; i++) { - if (expected[i] >= 0) { - Assert.assertEquals(expected[i], pagingIdentifiers.get(segmentIdentifiers.get(i)).intValue()); - } - } - Assert.assertEquals(expected[expected.length - 1], value.getEvents().size()); + public MultiSegmentSelectQueryTest(boolean fromNext) + { + this.fromNext = fromNext; + } - query = query.withPagingSpec(toNextPager(3, query.isDescending(), pagingIdentifiers)); - } + private Druids.SelectQueryBuilder newBuilder() + { + return Druids.newSelectQueryBuilder() + .dataSource(new TableDataSource(QueryRunnerTestHelper.dataSource)) + .intervals(SelectQueryRunnerTest.I_0112_0114) + .granularity(QueryRunnerTestHelper.allGran) + .dimensionSpecs(DefaultDimensionSpec.toSpec(QueryRunnerTestHelper.dimensions)) + .pagingSpec(PagingSpec.newSpec(3)); } @Test - public void testAllGranularityDescending() + public void testAllGranularity() { - SelectQuery query = builder.descending(true).build(); + runAllGranularityTest( + 
newBuilder().build(), + new int[][]{ + {2, -1, -1, -1, 3}, {3, 1, -1, -1, 3}, {-1, 3, 0, -1, 3}, {-1, -1, 3, -1, 3}, {-1, -1, 4, 1, 3}, + {-1, -1, -1, 4, 3}, {-1, -1, -1, 7, 3}, {-1, -1, -1, 10, 3}, {-1, -1, -1, 12, 2}, {-1, -1, -1, 13, 0} + } + ); - for (int[] expected : new int[][]{ - {0, 0, 0, -3, 3}, {0, 0, 0, -6, 3}, {0, 0, 0, -9, 3}, {0, 0, 0, -12, 3}, {0, 0, -2, -13, 3}, - {0, 0, -5, 0, 3}, {0, -3, 0, 0, 3}, {-2, -4, 0, 0, 3}, {-4, 0, 0, 0, 2}, {-5, 0, 0, 0, 0} - }) { + runAllGranularityTest( + newBuilder().descending(true).build(), + new int[][]{ + {0, 0, 0, -3, 3}, {0, 0, 0, -6, 3}, {0, 0, 0, -9, 3}, {0, 0, 0, -12, 3}, {0, 0, -2, -13, 3}, + {0, 0, -5, 0, 3}, {0, -3, 0, 0, 3}, {-2, -4, 0, 0, 3}, {-4, 0, 0, 0, 2}, {-5, 0, 0, 0, 0} + } + ); + } + + private void runAllGranularityTest(SelectQuery query, int[][] expectedOffsets) + { + for (int[] expected : expectedOffsets) { List> results = Sequences.toList( runner.run(query, ImmutableMap.of()), Lists.>newArrayList() @@ -242,26 +245,44 @@ public class MultiSegmentSelectQueryTest SelectResultValue value = results.get(0).getValue(); Map pagingIdentifiers = value.getPagingIdentifiers(); - for (int i = 0; i < expected.length - 1; i++) { - if (expected[i] < 0) { - Assert.assertEquals(expected[i], pagingIdentifiers.get(segmentIdentifiers.get(i)).intValue()); + Map merged = PagingSpec.merge(Arrays.asList(pagingIdentifiers)); + + for (int i = 0; i < 4; i++) { + if (query.isDescending() ^ expected[i] >= 0) { + Assert.assertEquals( + expected[i], pagingIdentifiers.get(segmentIdentifiers.get(i)).intValue() + ); } } - Assert.assertEquals(expected[expected.length - 1], value.getEvents().size()); + Assert.assertEquals(expected[4], value.getEvents().size()); - query = query.withPagingSpec(toNextPager(3, query.isDescending(), pagingIdentifiers)); + query = query.withPagingSpec(toNextCursor(merged, query, 3)); } } @Test - public void testDayGranularityAscending() + public void testDayGranularity() { - SelectQuery query = 
builder.granularity(QueryRunnerTestHelper.dayGran).build(); + runDayGranularityTest( + newBuilder().granularity(QueryRunnerTestHelper.dayGran).build(), + new int[][]{ + {2, -1, -1, 2, 3, 0, 0, 3}, {3, 1, -1, 5, 1, 2, 0, 3}, {-1, 3, 0, 8, 0, 2, 1, 3}, + {-1, -1, 3, 11, 0, 0, 3, 3}, {-1, -1, 4, 12, 0, 0, 1, 1}, {-1, -1, 5, 13, 0, 0, 0, 0} + } + ); - for (int[] expected : new int[][]{ - {2, -1, -1, 2, 3, 0, 0, 3}, {3, 1, -1, 5, 1, 2, 0, 3}, {-1, 3, 0, 8, 0, 2, 1, 3}, - {-1, -1, 3, 11, 0, 0, 3, 3}, {-1, -1, 4, 12, 0, 0, 1, 1}, {-1, -1, 5, 13, 0, 0, 0, 0} - }) { + runDayGranularityTest( + newBuilder().granularity(QueryRunnerTestHelper.dayGran).descending(true).build(), + new int[][]{ + {0, 0, -3, -3, 0, 0, 3, 3}, {0, -1, -5, -6, 0, 1, 2, 3}, {0, -4, 0, -9, 0, 3, 0, 3}, + {-3, 0, 0, -12, 3, 0, 0, 3}, {-4, 0, 0, -13, 1, 0, 0, 1}, {-5, 0, 0, -14, 0, 0, 0, 0} + } + ); + } + + private void runDayGranularityTest(SelectQuery query, int[][] expectedOffsets) + { + for (int[] expected : expectedOffsets) { List> results = Sequences.toList( runner.run(query, ImmutableMap.of()), Lists.>newArrayList() @@ -274,59 +295,23 @@ public class MultiSegmentSelectQueryTest Map pagingIdentifiers0 = value0.getPagingIdentifiers(); Map pagingIdentifiers1 = value1.getPagingIdentifiers(); + Map merged = PagingSpec.merge(Arrays.asList(pagingIdentifiers0, pagingIdentifiers1)); + for (int i = 0; i < 4; i++) { - if (expected[i] >= 0) { - Map paging = i < 3 ? 
pagingIdentifiers0 : pagingIdentifiers1; - Assert.assertEquals(expected[i], paging.get(segmentIdentifiers.get(i)).intValue()); + if (query.isDescending() ^ expected[i] >= 0) { + Assert.assertEquals(expected[i], merged.get(segmentIdentifiers.get(i)).intValue()); } } - query = query.withPagingSpec(toNextPager(3, query.isDescending(), pagingIdentifiers0, pagingIdentifiers1)); + query = query.withPagingSpec(toNextCursor(merged, query, 3)); } } - @Test - public void testDayGranularityDescending() + private PagingSpec toNextCursor(Map merged, SelectQuery query, int threshold) { - QueryGranularity granularity = QueryRunnerTestHelper.dayGran; - SelectQuery query = builder.granularity(granularity).descending(true).build(); - - for (int[] expected : new int[][]{ - {0, 0, -3, -3, 0, 0, 3, 3}, {0, -1, -5, -6, 0, 1, 2, 3}, {0, -4, 0, -9, 0, 3, 0, 3}, - {-3, 0, 0, -12, 3, 0, 0, 3}, {-4, 0, 0, -13, 1, 0, 0, 1}, {-5, 0, 0, -14, 0, 0, 0, 0} - }) { - List> results = Sequences.toList( - runner.run(query, ImmutableMap.of()), - Lists.>newArrayList() - ); - Assert.assertEquals(2, results.size()); - - SelectResultValue value0 = results.get(0).getValue(); - SelectResultValue value1 = results.get(1).getValue(); - - Map pagingIdentifiers0 = value0.getPagingIdentifiers(); - Map pagingIdentifiers1 = value1.getPagingIdentifiers(); - - for (int i = 0; i < 4; i++) { - if (expected[i] < 0) { - Map paging = i < 3 ? pagingIdentifiers1 : pagingIdentifiers0; - Assert.assertEquals(expected[i], paging.get(segmentIdentifiers.get(i)).intValue()); - } - } - - query = query.withPagingSpec(toNextPager(3, query.isDescending(), pagingIdentifiers0, pagingIdentifiers1)); + if (!fromNext) { + merged = PagingSpec.next(merged, query.isDescending()); } - } - - @SafeVarargs - private final PagingSpec toNextPager(int threshold, boolean descending, Map... 
pagers) - { - LinkedHashMap next = Maps.newLinkedHashMap(); - for (Map pager : pagers) { - for (Map.Entry entry : pager.entrySet()) { - next.put(entry.getKey(), descending ? entry.getValue() - 1 : entry.getValue() + 1); - } - } - return new PagingSpec(next, threshold); + return new PagingSpec(merged, threshold, fromNext); } } diff --git a/processing/src/test/java/io/druid/query/select/SelectQueryRunnerTest.java b/processing/src/test/java/io/druid/query/select/SelectQueryRunnerTest.java index 4831d68fed5..58895b35ce5 100644 --- a/processing/src/test/java/io/druid/query/select/SelectQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/select/SelectQueryRunnerTest.java @@ -28,6 +28,7 @@ import com.google.common.collect.ObjectArrays; import com.metamx.common.ISE; import com.metamx.common.guava.Sequences; import io.druid.jackson.DefaultObjectMapper; +import io.druid.query.Druids; import io.druid.query.QueryRunner; import io.druid.query.QueryRunnerTestHelper; import io.druid.query.Result; @@ -53,7 +54,6 @@ import java.io.IOException; import java.util.Arrays; import java.util.HashMap; import java.util.Iterator; -import java.util.LinkedHashMap; import java.util.List; import java.util.Map; @@ -126,20 +126,24 @@ public class SelectQueryRunnerTest this.descending = descending; } + private Druids.SelectQueryBuilder newTestQuery() { + return Druids.newSelectQueryBuilder() + .dataSource(new TableDataSource(QueryRunnerTestHelper.dataSource)) + .dimensionSpecs(DefaultDimensionSpec.toSpec(Arrays.asList())) + .metrics(Arrays.asList()) + .intervals(QueryRunnerTestHelper.fullOnInterval) + .granularity(QueryRunnerTestHelper.allGran) + .pagingSpec(PagingSpec.newSpec(3)) + .descending(descending); + } + @Test public void testFullOnSelect() { - SelectQuery query = new SelectQuery( - new TableDataSource(QueryRunnerTestHelper.dataSource), - I_0112_0114, - descending, - null, - QueryRunnerTestHelper.allGran, - DefaultDimensionSpec.toSpec(Arrays.asList()), - Arrays.asList(), - 
new PagingSpec(null, 3), - null - ); + SelectQuery query = newTestQuery() + .intervals(I_0112_0114) + .build(); + HashMap context = new HashMap(); Iterable> results = Sequences.toList( runner.run(query, context), @@ -155,6 +159,48 @@ public class SelectQueryRunnerTest verify(expectedResults, results); } + @Test + public void testSequentialPaging() + { + int[] asc = {2, 5, 8, 11, 14, 17, 20, 23, 25}; + int[] dsc = {-3, -6, -9, -12, -15, -18, -21, -24, -26}; + int[] expected = descending ? dsc : asc; + + SelectQuery query = newTestQuery().intervals(I_0112_0114).build(); + for (int offset : expected) { + List> results = Sequences.toList( + runner.run(query, ImmutableMap.of()), + Lists.>newArrayList() + ); + + Assert.assertEquals(1, results.size()); + + SelectResultValue result = results.get(0).getValue(); + Map pagingIdentifiers = result.getPagingIdentifiers(); + Assert.assertEquals(offset, pagingIdentifiers.get(QueryRunnerTestHelper.segmentId).intValue()); + + Map next = PagingSpec.next(pagingIdentifiers, descending); + query = query.withPagingSpec(new PagingSpec(next, 3)); + } + + query = newTestQuery().intervals(I_0112_0114).build(); + for (int offset : expected) { + List> results = Sequences.toList( + runner.run(query, ImmutableMap.of()), + Lists.>newArrayList() + ); + + Assert.assertEquals(1, results.size()); + + SelectResultValue result = results.get(0).getValue(); + Map pagingIdentifiers = result.getPagingIdentifiers(); + Assert.assertEquals(offset, pagingIdentifiers.get(QueryRunnerTestHelper.segmentId).intValue()); + + // use identifier as-is but with fromNext=true + query = query.withPagingSpec(new PagingSpec(pagingIdentifiers, 3, true)); + } + } + @Test public void testFullOnSelectWithDimensionSpec() { @@ -169,23 +215,20 @@ public class SelectQueryRunnerTest map.put("technology", "technology0"); map.put("travel", "travel0"); - SelectQuery query = new SelectQuery( - new TableDataSource(QueryRunnerTestHelper.dataSource), - QueryRunnerTestHelper.fullOnInterval, 
- descending, - null, - QueryRunnerTestHelper.allGran, - Arrays.asList( - new DefaultDimensionSpec(QueryRunnerTestHelper.marketDimension, "mar"), - new ExtractionDimensionSpec( - QueryRunnerTestHelper.qualityDimension, - "qual", - new LookupExtractionFn(new MapLookupExtractor(map, true), false, null, true, false) - ), - new DefaultDimensionSpec(QueryRunnerTestHelper.placementDimension, "place") - ), Lists.newArrayList(), new PagingSpec(null, 3), - null - ); + SelectQuery query = newTestQuery() + .dimensionSpecs( + Arrays.asList( + new DefaultDimensionSpec(QueryRunnerTestHelper.marketDimension, "mar"), + new ExtractionDimensionSpec( + QueryRunnerTestHelper.qualityDimension, + "qual", + new LookupExtractionFn(new MapLookupExtractor(map, true), false, null, true, false) + ), + new DefaultDimensionSpec(QueryRunnerTestHelper.placementDimension, "place") + ) + ) + .build(); + HashMap context = new HashMap(); Iterable> results = Sequences.toList( runner.run(query, context), @@ -286,17 +329,12 @@ public class SelectQueryRunnerTest @Test public void testSelectWithDimsAndMets() { - SelectQuery query = new SelectQuery( - new TableDataSource(QueryRunnerTestHelper.dataSource), - I_0112_0114, - descending, - null, - QueryRunnerTestHelper.allGran, - DefaultDimensionSpec.toSpec(Arrays.asList(QueryRunnerTestHelper.marketDimension)), - Arrays.asList(QueryRunnerTestHelper.indexMetric), - new PagingSpec(null, 3), - null - ); + SelectQuery query = newTestQuery() + .intervals(I_0112_0114) + .dimensionSpecs(DefaultDimensionSpec.toSpec(QueryRunnerTestHelper.marketDimension)) + .metrics(Arrays.asList(QueryRunnerTestHelper.indexMetric)) + .build(); + HashMap context = new HashMap(); Iterable> results = Sequences.toList( runner.run(query, context), @@ -325,17 +363,12 @@ public class SelectQueryRunnerTest @Test public void testSelectPagination() { - SelectQuery query = new SelectQuery( - new TableDataSource(QueryRunnerTestHelper.dataSource), - I_0112_0114, - descending, - null, - 
QueryRunnerTestHelper.allGran, - DefaultDimensionSpec.toSpec(Arrays.asList(QueryRunnerTestHelper.qualityDimension)), - Arrays.asList(QueryRunnerTestHelper.indexMetric), - new PagingSpec(toPagingIdentifier(3, descending), 3), - null - ); + SelectQuery query = newTestQuery() + .intervals(I_0112_0114) + .dimensionSpecs(DefaultDimensionSpec.toSpec(QueryRunnerTestHelper.qualityDimension)) + .metrics(Arrays.asList(QueryRunnerTestHelper.indexMetric)) + .pagingSpec(new PagingSpec(toPagingIdentifier(3, descending), 3)) + .build(); Iterable> results = Sequences.toList( runner.run(query, Maps.newHashMap()), @@ -363,17 +396,15 @@ public class SelectQueryRunnerTest { // startDelta + threshold pairs for (int[] param : new int[][]{{3, 3}, {0, 1}, {5, 5}, {2, 7}, {3, 0}}) { - SelectQuery query = new SelectQuery( - new TableDataSource(QueryRunnerTestHelper.dataSource), - I_0112_0114, - descending, - new SelectorDimFilter(QueryRunnerTestHelper.marketDimension, "spot"), - QueryRunnerTestHelper.dayGran, - DefaultDimensionSpec.toSpec(Lists.newArrayList(QueryRunnerTestHelper.qualityDimension)), - Lists.newArrayList(QueryRunnerTestHelper.indexMetric), - new PagingSpec(toPagingIdentifier(param[0], descending), param[1]), - null - ); + SelectQuery query = newTestQuery() + .intervals(I_0112_0114) + .filters(new SelectorDimFilter(QueryRunnerTestHelper.marketDimension, "spot")) + .granularity(QueryRunnerTestHelper.dayGran) + .dimensionSpecs(DefaultDimensionSpec.toSpec(QueryRunnerTestHelper.qualityDimension)) + .metrics(Lists.newArrayList(QueryRunnerTestHelper.indexMetric)) + .pagingSpec(new PagingSpec(toPagingIdentifier(param[0], descending), param[1])) + .build(); + HashMap context = new HashMap(); Iterable> results = Sequences.toList( runner.run(query, context), @@ -427,20 +458,17 @@ public class SelectQueryRunnerTest @Test public void testFullSelectNoResults() { - SelectQuery query = new SelectQuery( - new TableDataSource(QueryRunnerTestHelper.dataSource), - I_0112_0114, - descending, - 
new AndDimFilter( + SelectQuery query = newTestQuery() + .intervals(I_0112_0114) + .filters( + new AndDimFilter( Arrays.asList( new SelectorDimFilter(QueryRunnerTestHelper.marketDimension, "spot"), new SelectorDimFilter(QueryRunnerTestHelper.marketDimension, "foo") ) - ), - QueryRunnerTestHelper.allGran, - DefaultDimensionSpec.toSpec(Lists.newArrayList()), Lists.newArrayList(), new PagingSpec(null, 3), - null - ); + ) + ) + .build(); Iterable> results = Sequences.toList( runner.run(query, Maps.newHashMap()), @@ -463,17 +491,11 @@ public class SelectQueryRunnerTest @Test public void testFullSelectNoDimensionAndMetric() { - SelectQuery query = new SelectQuery( - new TableDataSource(QueryRunnerTestHelper.dataSource), - I_0112_0114, - descending, - null, - QueryRunnerTestHelper.allGran, - DefaultDimensionSpec.toSpec(Lists.newArrayList("foo")), - Lists.newArrayList("foo2"), - new PagingSpec(null, 3), - null - ); + SelectQuery query = newTestQuery() + .intervals(I_0112_0114) + .dimensionSpecs(DefaultDimensionSpec.toSpec("foo")) + .metrics(Lists.newArrayList("foo2")) + .build(); Iterable> results = Sequences.toList( runner.run(query, Maps.newHashMap()), @@ -498,13 +520,11 @@ public class SelectQueryRunnerTest verify(expectedResults, results); } - private LinkedHashMap toPagingIdentifier(int startDelta, boolean descending) + private Map toPagingIdentifier(int startDelta, boolean descending) { - return Maps.newLinkedHashMap( - ImmutableMap.of( - QueryRunnerTestHelper.segmentId, - PagingOffset.toOffset(startDelta, descending) - ) + return ImmutableMap.of( + QueryRunnerTestHelper.segmentId, + PagingOffset.toOffset(startDelta, descending) ); } diff --git a/processing/src/test/java/io/druid/query/select/SelectQuerySpecTest.java b/processing/src/test/java/io/druid/query/select/SelectQuerySpecTest.java index d56f51b66ee..2095650ed49 100644 --- a/processing/src/test/java/io/druid/query/select/SelectQuerySpecTest.java +++ 
b/processing/src/test/java/io/druid/query/select/SelectQuerySpecTest.java @@ -59,7 +59,7 @@ public class SelectQuerySpecTest + "\"granularity\":{\"type\":\"all\"}," + "\"dimensions\":[{\"type\":\"default\",\"dimension\":\"market\",\"outputName\":\"market\"},{\"type\":\"default\",\"dimension\":\"quality\",\"outputName\":\"quality\"}]," + "\"metrics\":[\"index\"]," - + "\"pagingSpec\":{\"pagingIdentifiers\":{},\"threshold\":3}," + + "\"pagingSpec\":{\"pagingIdentifiers\":{},\"threshold\":3,\"fromNext\":false}," + "\"context\":null}"; SelectQuery query = new SelectQuery(