Added TimestampComparator tests

Justin Borromeo 2019-02-04 12:02:13 -08:00
parent ad731a362b
commit 12e51a2721
4 changed files with 154 additions and 9 deletions


@@ -94,8 +94,8 @@ import java.util.concurrent.TimeUnit;
 /* Works with 4GB heap size or greater. Otherwise there's a good chance of an OOME. */
 @State(Scope.Benchmark)
 @Fork(value = 1)
-@Warmup(iterations = 5)
-@Measurement(iterations = 5)
+@Warmup(iterations = 10)
+@Measurement(iterations = 25)
 public class ScanBenchmark
 {
   @Param({"1", "4"})


@@ -22,7 +22,6 @@ package org.apache.druid.query.scan;
 import com.fasterxml.jackson.core.type.TypeReference;
 import com.google.common.base.Function;
 import com.google.common.base.Functions;
-import com.google.common.primitives.Longs;
 import com.google.inject.Inject;
 import org.apache.druid.java.util.common.UOE;
 import org.apache.druid.java.util.common.guava.BaseSequence;
@@ -36,7 +35,6 @@ import org.apache.druid.query.QueryPlus;
 import org.apache.druid.query.QueryRunner;
 import org.apache.druid.query.QueryToolChest;
 import org.apache.druid.query.aggregation.MetricManipulationFn;
-import org.apache.druid.segment.column.ColumnHolder;
 
 import java.io.IOException;
 import java.util.ArrayList;


@@ -31,7 +31,8 @@ public class ScanResultValueTimestampComparator implements Comparator<ScanResultValue>
 {
   private final ScanQuery scanQuery;
 
-  public ScanResultValueTimestampComparator(ScanQuery scanQuery) {
+  public ScanResultValueTimestampComparator(ScanQuery scanQuery)
+  {
     this.scanQuery = scanQuery;
   }
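
The constructor reformat above is the only change to the comparator itself in this diff, so its comparison logic is not visible here. As orientation for the tests that follow, here is a hedged sketch of the ordering those tests pin down. It is not the committed implementation; the getEvents()/getColumns() accessors on ScanResultValue and the getTimeOrder()/getResultFormat() accessors on ScanQuery are assumptions inferred from the builder calls used in the tests.

package org.apache.druid.query.scan;

import org.apache.druid.segment.column.ColumnHolder;

import java.util.Comparator;
import java.util.List;
import java.util.Map;

// Sketch only: approximates the behavior asserted by the four tests below.
public class TimestampComparatorSketch implements Comparator<ScanResultValue>
{
  private final ScanQuery scanQuery;

  public TimestampComparatorSketch(ScanQuery scanQuery)
  {
    this.scanQuery = scanQuery;
  }

  @Override
  public int compare(ScanResultValue o1, ScanResultValue o2)
  {
    int cmp = Long.compare(firstEventTimestamp(o1), firstEventTimestamp(o2));
    // Descending time order puts the later timestamp first, hence the inversion.
    return ScanQuery.TIME_ORDER_DESCENDING.equals(scanQuery.getTimeOrder()) ? -cmp : cmp;
  }

  @SuppressWarnings("unchecked")
  private long firstEventTimestamp(ScanResultValue value)
  {
    List<Object> events = (List<Object>) value.getEvents();  // assumed accessor
    if (ScanQuery.RESULT_FORMAT_COMPACTED_LIST.equals(scanQuery.getResultFormat())) {
      // Compacted rows are positional lists; locate the __time column by index.
      int timeIndex = value.getColumns().indexOf(ColumnHolder.TIME_COLUMN_NAME);
      return (Long) ((List<Object>) events.get(0)).get(timeIndex);
    }
    // LIST-format rows are maps keyed by column name.
    return (Long) ((Map<String, Object>) events.get(0)).get(ColumnHolder.TIME_COLUMN_NAME);
  }
}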


@@ -19,17 +19,163 @@
 package org.apache.druid.query.scan;
 
+import org.apache.druid.query.Druids;
+import org.apache.druid.query.spec.MultipleIntervalSegmentSpec;
+import org.apache.druid.query.spec.QuerySegmentSpec;
+import org.apache.druid.segment.column.ColumnHolder;
+import org.joda.time.Interval;
+import org.junit.Assert;
+import org.junit.BeforeClass;
 import org.junit.Test;
 
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+
 public class ScanResultValueTimestampComparatorTest
 {
-  @Test
-  public void comparisonTest() {
-
-  }
-
-  @Test
-  public void priorityQueueTest() {
-
-  }
+  private static QuerySegmentSpec intervalSpec;
+
+  @BeforeClass
+  public static void setup()
+  {
+    intervalSpec = new MultipleIntervalSegmentSpec(Collections.singletonList(new Interval(0, 1)));
+  }
+
+  @Test
+  public void comparisonDescendingListTest()
+  {
+    ScanQuery query = Druids.newScanQueryBuilder()
+        .timeOrder(ScanQuery.TIME_ORDER_DESCENDING)
+        .resultFormat(ScanQuery.RESULT_FORMAT_LIST)
+        .dataSource("some src")
+        .intervals(intervalSpec)
+        .build();
+
+    ScanResultValueTimestampComparator comparator = new ScanResultValueTimestampComparator(query);
+
+    ArrayList<HashMap<String, Object>> events1 = new ArrayList<>();
+    HashMap<String, Object> event1 = new HashMap<>();
+    event1.put(ColumnHolder.TIME_COLUMN_NAME, new Long(42));
+    events1.add(event1);
+    ScanResultValue s1 = new ScanResultValue(
+        "segmentId",
+        Collections.singletonList(ColumnHolder.TIME_COLUMN_NAME),
+        events1);
+
+    ArrayList<HashMap<String, Object>> events2 = new ArrayList<>();
+    HashMap<String, Object> event2 = new HashMap<>();
+    event2.put(ColumnHolder.TIME_COLUMN_NAME, new Long(43));
+    events2.add(event2);
+    ScanResultValue s2 = new ScanResultValue(
+        "segmentId",
+        Collections.singletonList(ColumnHolder.TIME_COLUMN_NAME),
+        events2);
+
+    Assert.assertEquals(1, comparator.compare(s1, s2));
+  }
+
+  @Test
+  public void comparisonAscendingListTest()
+  {
+    ScanQuery query = Druids.newScanQueryBuilder()
+        .timeOrder(ScanQuery.TIME_ORDER_ASCENDING)
+        .resultFormat(ScanQuery.RESULT_FORMAT_LIST)
+        .dataSource("some src")
+        .intervals(intervalSpec)
+        .build();
+
+    ScanResultValueTimestampComparator comparator = new ScanResultValueTimestampComparator(query);
+
+    ArrayList<HashMap<String, Object>> events1 = new ArrayList<>();
+    HashMap<String, Object> event1 = new HashMap<>();
+    event1.put(ColumnHolder.TIME_COLUMN_NAME, new Long(42));
+    events1.add(event1);
+    ScanResultValue s1 = new ScanResultValue(
+        "segmentId",
+        Collections.singletonList(ColumnHolder.TIME_COLUMN_NAME),
+        events1);
+
+    ArrayList<HashMap<String, Object>> events2 = new ArrayList<>();
+    HashMap<String, Object> event2 = new HashMap<>();
+    event2.put(ColumnHolder.TIME_COLUMN_NAME, new Long(43));
+    events2.add(event2);
+    ScanResultValue s2 = new ScanResultValue(
+        "segmentId",
+        Collections.singletonList(ColumnHolder.TIME_COLUMN_NAME),
+        events2);
+
+    Assert.assertEquals(-1, comparator.compare(s1, s2));
+  }
+
+  @Test
+  public void comparisonDescendingCompactedListTest()
+  {
+    ScanQuery query = Druids.newScanQueryBuilder()
+        .timeOrder(ScanQuery.TIME_ORDER_DESCENDING)
+        .resultFormat(ScanQuery.RESULT_FORMAT_COMPACTED_LIST)
+        .dataSource("some src")
+        .intervals(intervalSpec)
+        .build();
+
+    ScanResultValueTimestampComparator comparator = new ScanResultValueTimestampComparator(query);
+
+    List<List<Object>> events1 = new ArrayList<>();
+    List<Object> event1 = Collections.singletonList(new Long(42));
+    events1.add(event1);
+    ScanResultValue s1 = new ScanResultValue(
+        "segmentId",
+        Collections.singletonList(ColumnHolder.TIME_COLUMN_NAME),
+        events1);
+
+    List<List<Object>> events2 = new ArrayList<>();
+    List<Object> event2 = Collections.singletonList(new Long(43));
+    events2.add(event2);
+    ScanResultValue s2 = new ScanResultValue(
+        "segmentId",
+        Collections.singletonList(ColumnHolder.TIME_COLUMN_NAME),
+        events2);
+
+    Assert.assertEquals(1, comparator.compare(s1, s2));
+  }
+
+  @Test
+  public void comparisonAscendingCompactedListTest()
+  {
+    ScanQuery query = Druids.newScanQueryBuilder()
+        .timeOrder(ScanQuery.TIME_ORDER_ASCENDING)
+        .resultFormat(ScanQuery.RESULT_FORMAT_COMPACTED_LIST)
+        .dataSource("some src")
+        .intervals(intervalSpec)
+        .build();
+
+    ScanResultValueTimestampComparator comparator = new ScanResultValueTimestampComparator(query);
+
+    List<List<Object>> events1 = new ArrayList<>();
+    List<Object> event1 = Collections.singletonList(new Long(42));
+    events1.add(event1);
+    ScanResultValue s1 = new ScanResultValue(
+        "segmentId",
+        Collections.singletonList(ColumnHolder.TIME_COLUMN_NAME),
+        events1);
+
+    List<List<Object>> events2 = new ArrayList<>();
+    List<Object> event2 = Collections.singletonList(new Long(43));
+    events2.add(event2);
+    ScanResultValue s2 = new ScanResultValue(
+        "segmentId",
+        Collections.singletonList(ColumnHolder.TIME_COLUMN_NAME),
+        events2);
+
+    Assert.assertEquals(-1, comparator.compare(s1, s2));
+  }
 }
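
The removed priorityQueueTest stub hints at how this comparator is meant to be used: ordering batches of scan results by time when merging across segments. A hypothetical usage sketch follows; the helper class, method name, per-segment collection, and emitter are illustrative assumptions, not part of this commit.

import java.util.List;
import java.util.PriorityQueue;
import java.util.function.Consumer;

// Hypothetical helper, not part of the commit: drains per-segment result batches
// in the query's time order using ScanResultValueTimestampComparator.
class TimeOrderedEmitSketch
{
  static void emitInTimeOrder(
      ScanQuery query,
      List<ScanResultValue> perSegmentBatches,
      Consumer<ScanResultValue> emitter
  )
  {
    PriorityQueue<ScanResultValue> queue =
        new PriorityQueue<>(new ScanResultValueTimestampComparator(query));
    queue.addAll(perSegmentBatches);
    while (!queue.isEmpty()) {
      // The queue head is the batch that sorts first under the query's time order.
      emitter.accept(queue.poll());
    }
  }
}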