Fixing tests WIP

This commit is contained in:
Justin Borromeo 2019-02-06 17:39:48 -08:00
parent 85e72a614e
commit b2c8c77ad4
9 changed files with 14 additions and 15 deletions

View File

@@ -94,7 +94,7 @@ import java.util.concurrent.TimeUnit;
/* Works with 8GB heap size or greater. Otherwise there's a good chance of an OOME. */
@State(Scope.Benchmark)
@Fork(value = 1)
@Warmup(iterations = 10)
@Warmup(iterations = 25)
@Measurement(iterations = 25)
public class ScanBenchmark
{

View File

@@ -950,11 +950,11 @@ public class Druids
resultFormat,
batchSize,
limit,
timeOrder,
dimFilter,
columns,
legacy,
context,
timeOrder
context
);
}

View File

@@ -61,11 +61,11 @@ public class ScanQuery extends BaseQuery<ScanResultValue>
@JsonProperty("resultFormat") String resultFormat,
@JsonProperty("batchSize") int batchSize,
@JsonProperty("limit") long limit,
@JsonProperty("timeOrder") String timeOrder,
@JsonProperty("filter") DimFilter dimFilter,
@JsonProperty("columns") List<String> columns,
@JsonProperty("legacy") Boolean legacy,
@JsonProperty("context") Map<String, Object> context,
@JsonProperty("timeOrder") String timeOrder
@JsonProperty("context") Map<String, Object> context
)
{
super(dataSource, querySegmentSpec, false, context);

View File

@@ -54,7 +54,6 @@ public class ScanQueryQueryToolChest extends QueryToolChest<ScanResultValue, Sca
private final ScanQueryConfig scanQueryConfig;
private final GenericQueryMetricsFactory queryMetricsFactory;
private final long maxRowsForInMemoryTimeOrdering;
@Inject
public ScanQueryQueryToolChest(
@@ -64,7 +63,6 @@ public class ScanQueryQueryToolChest extends QueryToolChest<ScanResultValue, Sca
{
this.scanQueryConfig = scanQueryConfig;
this.queryMetricsFactory = queryMetricsFactory;
this.maxRowsForInMemoryTimeOrdering = scanQueryConfig.getMaxRowsTimeOrderedInMemory();
}
@Override
@@ -200,7 +198,7 @@ public class ScanQueryQueryToolChest extends QueryToolChest<ScanResultValue, Sca
return sortedElements.iterator();
}
private class ScanBatchedTimeOrderedQueueIterator implements CloseableIterator<ScanResultValue>
private static class ScanBatchedTimeOrderedQueueIterator implements CloseableIterator<ScanResultValue>
{
private final Iterator<ScanResultValue> itr;
private final int batchSize;

View File

@@ -49,7 +49,7 @@ public class ScanQueryQueryToolChestTest
private static QuerySegmentSpec emptySegmentSpec;
@BeforeClass
public void setup()
public static void setup()
{
config = createNiceMock(ScanQueryConfig.class);
expect(config.getMaxRowsTimeOrderedInMemory()).andReturn(100000);

View File

@@ -68,11 +68,11 @@ public class ScanQuerySpecTest
null,
0,
3,
"none",
null,
Arrays.asList("market", "quality", "index"),
null,
null,
"none"
null
);
String actual = jsonMapper.writeValueAsString(query);

View File

@@ -39,7 +39,7 @@ public class ScanResultValueTimestampComparatorTest
private static QuerySegmentSpec intervalSpec;
@BeforeClass
public void setup()
public static void setup()
{
intervalSpec = new MultipleIntervalSegmentSpec(
Collections.singletonList(

View File

@@ -964,11 +964,12 @@ public class DruidQuery
ScanQuery.RESULT_FORMAT_COMPACTED_LIST,
0,
scanLimit,
null, // Will default to "none"
filtration.getDimFilter(),
Ordering.natural().sortedCopy(ImmutableSet.copyOf(outputRowSignature.getRowOrder())),
false,
ImmutableSortedMap.copyOf(plannerContext.getQueryContext()),
null // Will default to "none"
ImmutableSortedMap.copyOf(plannerContext.getQueryContext())
);
}

View File

@@ -770,7 +770,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest
+ " DruidQueryRel(query=[{\"queryType\":\"scan\",\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"virtualColumns\":[],\"resultFormat\":\"compactedList\",\"batchSize\":20480,\"limit\":9223372036854775807,\"filter\":{\"type\":\"not\",\"field\":{\"type\":\"selector\",\"dimension\":\"dim1\",\"value\":"
+ emptyStringEq
+ ",\"extractionFn\":null}},\"columns\":[\"dim1\"],\"legacy\":false,\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\",\"sqlQueryId\":\"dummy\"},\"descending\":false,\"granularity\":{\"type\":\"all\"}}], signature=[{dim1:STRING}])\n"
+ " DruidQueryRel(query=[{\"queryType\":\"scan\",\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"virtualColumns\":[],\"resultFormat\":\"compactedList\",\"batchSize\":20480,\"limit\":9223372036854775807,\"filter\":null,\"columns\":[\"dim1\",\"dim2\"],\"legacy\":false,\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\",\"sqlQueryId\":\"dummy\"},\"descending\":false,\"granularity\":{\"type\":\"all\"}}], signature=[{dim1:STRING, dim2:STRING}])\n";
+ " DruidQueryRel(query=[{\"queryType\":\"scan\",\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"virtualColumns\":[],\"resultFormat\":\"compactedList\",\"batchSize\":20480,\"limit\":9223372036854775807,\"timeOrder\":\"none\",\"filter\":null,\"columns\":[\"dim1\",\"dim2\"],\"legacy\":false,\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\",\"sqlQueryId\":\"dummy\"},\"descending\":false,\"granularity\":{\"type\":\"all\"}}], signature=[{dim1:STRING, dim2:STRING}])\n";
testQuery(
PLANNER_CONFIG_FALLBACK,