Fixing tests WIP

Justin Borromeo 2019-02-06 17:39:48 -08:00
parent 85e72a614e
commit b2c8c77ad4
9 changed files with 14 additions and 15 deletions

@@ -94,7 +94,7 @@ import java.util.concurrent.TimeUnit;
 /* Works with 8GB heap size or greater. Otherwise there's a good chance of an OOME. */
 @State(Scope.Benchmark)
 @Fork(value = 1)
-@Warmup(iterations = 10)
+@Warmup(iterations = 25)
 @Measurement(iterations = 25)
 public class ScanBenchmark
 {

@@ -950,11 +950,11 @@ public class Druids
         resultFormat,
         batchSize,
         limit,
+        timeOrder,
         dimFilter,
         columns,
         legacy,
-        context,
-        timeOrder
+        context
     );
   }

@@ -61,11 +61,11 @@ public class ScanQuery extends BaseQuery<ScanResultValue>
       @JsonProperty("resultFormat") String resultFormat,
       @JsonProperty("batchSize") int batchSize,
       @JsonProperty("limit") long limit,
+      @JsonProperty("timeOrder") String timeOrder,
       @JsonProperty("filter") DimFilter dimFilter,
       @JsonProperty("columns") List<String> columns,
       @JsonProperty("legacy") Boolean legacy,
-      @JsonProperty("context") Map<String, Object> context,
-      @JsonProperty("timeOrder") String timeOrder
+      @JsonProperty("context") Map<String, Object> context
   )
   {
     super(dataSource, querySegmentSpec, false, context);
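
The move is purely positional: Jackson binds constructor arguments to JSON properties by the names in @JsonProperty, so existing scan-query JSON keeps deserializing the same way, while every positional Java caller (Druids, DruidQuery, the tests below) now has to pass timeOrder right after limit. A toy sketch of that name-based binding (illustrative class, not Druid code):

import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.ObjectMapper;

public class JsonPropertyOrderSketch
{
  // Toy analogue of the reordered constructor: "timeOrder" now sits between
  // "limit" and "filter" in the parameter list.
  public static class ToyScanSpec
  {
    public final long limit;
    public final String timeOrder;
    public final String filter;

    @JsonCreator
    public ToyScanSpec(
        @JsonProperty("limit") long limit,
        @JsonProperty("timeOrder") String timeOrder,
        @JsonProperty("filter") String filter
    )
    {
      this.limit = limit;
      this.timeOrder = timeOrder;
      this.filter = filter;
    }
  }

  public static void main(String[] args) throws Exception
  {
    // Property order in the JSON does not matter; binding is by property name,
    // so only positional Java callers are affected by the parameter reordering.
    ToyScanSpec spec = new ObjectMapper().readValue(
        "{\"filter\":null,\"limit\":3,\"timeOrder\":\"none\"}",
        ToyScanSpec.class
    );
    System.out.println(spec.timeOrder); // prints "none"
  }
}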

@@ -54,7 +54,6 @@ public class ScanQueryQueryToolChest extends QueryToolChest<ScanResultValue, ScanQuery>
   private final ScanQueryConfig scanQueryConfig;
   private final GenericQueryMetricsFactory queryMetricsFactory;
-  private final long maxRowsForInMemoryTimeOrdering;
   @Inject
   public ScanQueryQueryToolChest(
@@ -64,7 +63,6 @@ public class ScanQueryQueryToolChest extends QueryToolChest<ScanResultValue, ScanQuery>
   {
     this.scanQueryConfig = scanQueryConfig;
     this.queryMetricsFactory = queryMetricsFactory;
-    this.maxRowsForInMemoryTimeOrdering = scanQueryConfig.getMaxRowsTimeOrderedInMemory();
   }
   @Override
@@ -200,7 +198,7 @@ public class ScanQueryQueryToolChest extends QueryToolChest<ScanResultValue, ScanQuery>
     return sortedElements.iterator();
   }
-  private class ScanBatchedTimeOrderedQueueIterator implements CloseableIterator<ScanResultValue>
+  private static class ScanBatchedTimeOrderedQueueIterator implements CloseableIterator<ScanResultValue>
   {
     private final Iterator<ScanResultValue> itr;
     private final int batchSize;
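
Marking the iterator as a static nested class is a small lifecycle fix: its visible state (itr, batchSize) is all its own, and as a static class it no longer carries an implicit reference to the enclosing ScanQueryQueryToolChest instance. A generic sketch of the difference (not Druid code):

// Generic sketch of what 'static' changes for a nested class.
public class NestedClassSketch
{
  // Every Inner instance holds a hidden reference to the NestedClassSketch that
  // created it, keeping that outer object reachable as long as the Inner lives.
  private class Inner {}

  // A static nested class has no such reference; it can only use its own fields
  // and whatever is passed to its constructor.
  private static class Nested {}

  Inner makeInner()
  {
    return new Inner();   // needs an enclosing instance ('this')
  }

  static Nested makeNested()
  {
    return new Nested();  // no enclosing instance involved
  }
}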

@@ -49,7 +49,7 @@ public class ScanQueryQueryToolChestTest
   private static QuerySegmentSpec emptySegmentSpec;
   @BeforeClass
-  public void setup()
+  public static void setup()
   {
     config = createNiceMock(ScanQueryConfig.class);
     expect(config.getMaxRowsTimeOrderedInMemory()).andReturn(100000);
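
This signature change (and the identical one in ScanResultValueTimestampComparatorTest below) is a JUnit 4 requirement rather than a style choice: @BeforeClass runs once before all tests in the class and must be declared public static void, otherwise the runner rejects the class during validation instead of running any tests. A minimal sketch of the rule (toy fixture, not the actual test class):

import static org.junit.Assert.assertNotNull;

import org.junit.BeforeClass;
import org.junit.Test;

public class BeforeClassSketch
{
  private static String sharedFixture;

  // Must be public static void with no arguments; without 'static', JUnit 4
  // fails the whole class with an initialization error.
  @BeforeClass
  public static void setup()
  {
    sharedFixture = "built once for the whole class";
  }

  @Test
  public void usesSharedFixture()
  {
    assertNotNull(sharedFixture);
  }
}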

@@ -68,11 +68,11 @@ public class ScanQuerySpecTest
         null,
         0,
         3,
+        "none",
         null,
         Arrays.asList("market", "quality", "index"),
         null,
-        null,
-        "none"
+        null
     );
     String actual = jsonMapper.writeValueAsString(query);

@@ -39,7 +39,7 @@ public class ScanResultValueTimestampComparatorTest
   private static QuerySegmentSpec intervalSpec;
   @BeforeClass
-  public void setup()
+  public static void setup()
   {
     intervalSpec = new MultipleIntervalSegmentSpec(
         Collections.singletonList(

@@ -964,11 +964,12 @@ public class DruidQuery
         ScanQuery.RESULT_FORMAT_COMPACTED_LIST,
         0,
         scanLimit,
+        null, // Will default to "none"
         filtration.getDimFilter(),
         Ordering.natural().sortedCopy(ImmutableSet.copyOf(outputRowSignature.getRowOrder())),
         false,
-        ImmutableSortedMap.copyOf(plannerContext.getQueryContext()),
-        null // Will default to "none"
+        ImmutableSortedMap.copyOf(plannerContext.getQueryContext())
     );
   }
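
DruidQuery passes null for the new argument and, per the inline comment, relies on ScanQuery to fall back to "none" ordering. The constructor body is not part of this diff; a hedged sketch of what that defaulting presumably looks like (names are illustrative, the real field and constant names in ScanQuery may differ):

public class TimeOrderDefaultSketch
{
  // Assumed null-handling for the timeOrder argument, inferred from the
  // "Will default to \"none\"" comment above.
  static String resolveTimeOrder(String timeOrder)
  {
    return timeOrder == null ? "none" : timeOrder;
  }

  public static void main(String[] args)
  {
    System.out.println(resolveTimeOrder(null)); // prints "none"
  }
}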

@@ -770,7 +770,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest
         + " DruidQueryRel(query=[{\"queryType\":\"scan\",\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"virtualColumns\":[],\"resultFormat\":\"compactedList\",\"batchSize\":20480,\"limit\":9223372036854775807,\"filter\":{\"type\":\"not\",\"field\":{\"type\":\"selector\",\"dimension\":\"dim1\",\"value\":
         + emptyStringEq
         + ",\"extractionFn\":null}},\"columns\":[\"dim1\"],\"legacy\":false,\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\",\"sqlQueryId\":\"dummy\"},\"descending\":false,\"granularity\":{\"type\":\"all\"}}], signature=[{dim1:STRING}])\n"
-        + " DruidQueryRel(query=[{\"queryType\":\"scan\",\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"virtualColumns\":[],\"resultFormat\":\"compactedList\",\"batchSize\":20480,\"limit\":9223372036854775807,\"filter\":null,\"columns\":[\"dim1\",\"dim2\"],\"legacy\":false,\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\",\"sqlQueryId\":\"dummy\"},\"descending\":false,\"granularity\":{\"type\":\"all\"}}], signature=[{dim1:STRING, dim2:STRING}])\n";
+        + " DruidQueryRel(query=[{\"queryType\":\"scan\",\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"virtualColumns\":[],\"resultFormat\":\"compactedList\",\"batchSize\":20480,\"limit\":9223372036854775807,\"timeOrder\":\"none\",\"filter\":null,\"columns\":[\"dim1\",\"dim2\"],\"legacy\":false,\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\",\"sqlQueryId\":\"dummy\"},\"descending\":false,\"granularity\":{\"type\":\"all\"}}], signature=[{dim1:STRING, dim2:STRING}])\n";
     testQuery(
         PLANNER_CONFIG_FALLBACK,