diff --git a/codestyle/checkstyle.xml b/codestyle/checkstyle.xml
index d42b7053789..04e18b5fe27 100644
--- a/codestyle/checkstyle.xml
+++ b/codestyle/checkstyle.xml
@@ -291,5 +291,10 @@
     codestyle/checkstyle.xml. "/>
+
+
+
+
+
diff --git a/core/src/main/java/org/apache/druid/java/util/common/DateTimes.java b/core/src/main/java/org/apache/druid/java/util/common/DateTimes.java
index 94f1295d2e6..de1fc403b01 100644
--- a/core/src/main/java/org/apache/druid/java/util/common/DateTimes.java
+++ b/core/src/main/java/org/apache/druid/java/util/common/DateTimes.java
@@ -107,7 +107,17 @@ public final class DateTimes
   public static DateTime of(String instant)
   {
-    return new DateTime(instant, ISOChronology.getInstanceUTC());
+    try {
+      return new DateTime(instant, ISOChronology.getInstanceUTC());
+    }
+    catch (IllegalArgumentException ex) {
+      try {
+        return new DateTime(Long.valueOf(instant), ISOChronology.getInstanceUTC());
+      }
+      catch (IllegalArgumentException ex2) {
+        throw ex;
+      }
+    }
   }
 
   public static DateTime of(
diff --git a/core/src/main/java/org/apache/druid/java/util/common/URIs.java b/core/src/main/java/org/apache/druid/java/util/common/URIs.java
new file mode 100644
index 00000000000..f2476e71f09
--- /dev/null
+++ b/core/src/main/java/org/apache/druid/java/util/common/URIs.java
@@ -0,0 +1,43 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.java.util.common;
+
+import com.google.common.base.Preconditions;
+
+import java.net.URI;
+
+public final class URIs
+{
+  public static URI parse(String strUri, String defaultScheme)
+  {
+    Preconditions.checkNotNull(strUri, "strUri");
+    Preconditions.checkNotNull(defaultScheme, "defaultScheme");
+    final String[] tokens = strUri.split("://");
+    if (tokens.length == 1) {
+      return URI.create(StringUtils.format("%s://%s", defaultScheme, strUri));
+    } else {
+      return URI.create(strUri);
+    }
+  }
+
+  private URIs()
+  {
+  }
+}
diff --git a/core/src/test/java/org/apache/druid/java/util/common/DateTimesTest.java b/core/src/test/java/org/apache/druid/java/util/common/DateTimesTest.java
index 61bc746ca29..15f30336c85 100644
--- a/core/src/test/java/org/apache/druid/java/util/common/DateTimesTest.java
+++ b/core/src/test/java/org/apache/druid/java/util/common/DateTimesTest.java
@@ -37,4 +37,23 @@ public class DateTimesTest
       Assert.assertTrue(DateTimes.COMMON_DATE_TIME_PATTERN.matcher(dt.toString()).matches());
     }
   }
+
+  @Test
+  public void testStringToDateTimeConversion()
+  {
+    String isoDateTime = "2018-01-30T06:00:00";
+    DateTime dt1 = DateTimes.of(isoDateTime);
+    Assert.assertEquals("2018-01-30T06:00:00.000Z", dt1.toString());
+
+    String millis = "1517292000000";
+    DateTime dt2 = DateTimes.of(millis);
+    Assert.assertEquals("2018-01-30T06:00:00.000Z", dt2.toString());
+  }
+
+  @Test(expected = IllegalArgumentException.class)
+  public void testStringToDateTimeConversion_RethrowInitialException()
+  {
+    String invalid = "51729200AZ";
+    DateTimes.of(invalid);
+  }
 }
diff --git a/core/src/test/java/org/apache/druid/java/util/common/URIsTest.java b/core/src/test/java/org/apache/druid/java/util/common/URIsTest.java
new file mode 100644
index 00000000000..bb65e8e5981
--- /dev/null
+++ b/core/src/test/java/org/apache/druid/java/util/common/URIsTest.java
@@ -0,0 +1,70 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */ + +package org.apache.druid.java.util.common; + +import org.junit.Assert; +import org.junit.Test; + +import java.net.URI; + +public class URIsTest +{ + @Test + public void testFullUri() + { + final String strUri = "https://test-user@127.0.0.1:8000/test/path?test-query#test-fragment"; + final URI uri = URIs.parse(strUri, "http"); + + Assert.assertEquals("https", uri.getScheme()); + Assert.assertEquals("test-user", uri.getUserInfo()); + Assert.assertEquals("127.0.0.1", uri.getHost()); + Assert.assertEquals(8000, uri.getPort()); + Assert.assertEquals("/test/path", uri.getPath()); + Assert.assertEquals("test-query", uri.getQuery()); + Assert.assertEquals("test-fragment", uri.getFragment()); + } + + @Test + public void testWithoutScheme() + { + final String strUri = "test-user@127.0.0.1:8000/test/path?test-query#test-fragment"; + final URI uri = URIs.parse(strUri, "http"); + + Assert.assertEquals("http", uri.getScheme()); + Assert.assertEquals("test-user", uri.getUserInfo()); + Assert.assertEquals("127.0.0.1", uri.getHost()); + Assert.assertEquals(8000, uri.getPort()); + Assert.assertEquals("/test/path", uri.getPath()); + Assert.assertEquals("test-query", uri.getQuery()); + Assert.assertEquals("test-fragment", uri.getFragment()); + } + + @Test + public void testSimpleUri() + { + final String strUri = "127.0.0.1:8000"; + final URI uri = URIs.parse(strUri, "https"); + + Assert.assertEquals("https", uri.getScheme()); + Assert.assertNull(uri.getUserInfo()); + Assert.assertEquals("127.0.0.1", uri.getHost()); + Assert.assertEquals(8000, uri.getPort()); + } +} diff --git a/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/hll/sql/HllSketchSqlAggregatorTest.java b/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/hll/sql/HllSketchSqlAggregatorTest.java index 1690ef9b389..da70643ae40 100644 --- a/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/hll/sql/HllSketchSqlAggregatorTest.java +++ b/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/hll/sql/HllSketchSqlAggregatorTest.java @@ -274,7 +274,7 @@ public class HllSketchSqlAggregatorTest extends CalciteTestBase null, null ), - BaseCalciteQueryTest.NOT(BaseCalciteQueryTest.SELECTOR("dim2", "", null)) + BaseCalciteQueryTest.not(BaseCalciteQueryTest.selector("dim2", "", null)) ), new HllSketchBuildAggregatorFactory( "a3", diff --git a/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/quantiles/DoublesSketchAggregatorTest.java b/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/quantiles/DoublesSketchAggregatorTest.java index 65d5717890b..a29f6493dab 100644 --- a/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/quantiles/DoublesSketchAggregatorTest.java +++ b/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/quantiles/DoublesSketchAggregatorTest.java @@ -342,7 +342,7 @@ public class DoublesSketchAggregatorTest } @Test - public void QueryingDataWithFieldNameValueAsFloatInsteadOfSketch() throws Exception + public void queryingDataWithFieldNameValueAsFloatInsteadOfSketch() throws Exception { Sequence seq = helper.createIndexAndRunQueryOnSegment( new File(this.getClass().getClassLoader().getResource("quantiles/doubles_build_data.tsv").getFile()), @@ -418,7 +418,7 @@ public class DoublesSketchAggregatorTest } @Test - public void 
TimeSeriesQueryInputAsFloat() throws Exception + public void timeSeriesQueryInputAsFloat() throws Exception { Sequence seq = timeSeriesHelper.createIndexAndRunQueryOnSegment( new File(this.getClass().getClassLoader().getResource("quantiles/doubles_build_data.tsv").getFile()), diff --git a/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/theta/sql/ThetaSketchSqlAggregatorTest.java b/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/theta/sql/ThetaSketchSqlAggregatorTest.java index 919a59618a0..939e396950a 100644 --- a/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/theta/sql/ThetaSketchSqlAggregatorTest.java +++ b/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/theta/sql/ThetaSketchSqlAggregatorTest.java @@ -278,7 +278,7 @@ public class ThetaSketchSqlAggregatorTest extends CalciteTestBase null, null ), - BaseCalciteQueryTest.NOT(BaseCalciteQueryTest.SELECTOR("dim2", "", null)) + BaseCalciteQueryTest.not(BaseCalciteQueryTest.selector("dim2", "", null)) ), new SketchMergeAggregatorFactory( "a3", diff --git a/extensions-core/druid-bloom-filter/src/test/java/org/apache/druid/query/filter/sql/BloomDimFilterSqlTest.java b/extensions-core/druid-bloom-filter/src/test/java/org/apache/druid/query/filter/sql/BloomDimFilterSqlTest.java index 0b44cf182ee..335599adf63 100644 --- a/extensions-core/druid-bloom-filter/src/test/java/org/apache/druid/query/filter/sql/BloomDimFilterSqlTest.java +++ b/extensions-core/druid-bloom-filter/src/test/java/org/apache/druid/query/filter/sql/BloomDimFilterSqlTest.java @@ -114,12 +114,12 @@ public class BloomDimFilterSqlTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) .filters( new BloomDimFilter("dim1", BloomKFilterHolder.fromBloomKFilter(filter), null) ) - .aggregators(AGGS(new CountAggregatorFactory("a0"))) + .aggregators(aggregators(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build() ), @@ -146,7 +146,7 @@ public class BloomDimFilterSqlTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) .virtualColumns() .filters( @@ -155,7 +155,7 @@ public class BloomDimFilterSqlTest extends BaseCalciteQueryTest createExprMacroTable() ) ) - .aggregators(AGGS(new CountAggregatorFactory("a0"))) + .aggregators(aggregators(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build() ), @@ -178,7 +178,7 @@ public class BloomDimFilterSqlTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) .virtualColumns() .filters( @@ -187,7 +187,7 @@ public class BloomDimFilterSqlTest extends BaseCalciteQueryTest createExprMacroTable() ) ) - .aggregators(AGGS(new CountAggregatorFactory("a0"))) + .aggregators(aggregators(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build() ), @@ -214,7 +214,7 @@ public class BloomDimFilterSqlTest extends BaseCalciteQueryTest 
ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) .filters( new OrDimFilter( @@ -222,7 +222,7 @@ public class BloomDimFilterSqlTest extends BaseCalciteQueryTest new BloomDimFilter("dim2", BloomKFilterHolder.fromBloomKFilter(filter2), null) ) ) - .aggregators(AGGS(new CountAggregatorFactory("a0"))) + .aggregators(aggregators(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build() ), diff --git a/extensions-core/kafka-indexing-service/src/test/java/org/apache/druid/indexing/kafka/KafkaDataSourceMetadataTest.java b/extensions-core/kafka-indexing-service/src/test/java/org/apache/druid/indexing/kafka/KafkaDataSourceMetadataTest.java index 89f5ce8b0e7..5b609132af2 100644 --- a/extensions-core/kafka-indexing-service/src/test/java/org/apache/druid/indexing/kafka/KafkaDataSourceMetadataTest.java +++ b/extensions-core/kafka-indexing-service/src/test/java/org/apache/druid/indexing/kafka/KafkaDataSourceMetadataTest.java @@ -28,10 +28,10 @@ import java.util.Map; public class KafkaDataSourceMetadataTest { - private static final KafkaDataSourceMetadata KM0 = KM("foo", ImmutableMap.of()); - private static final KafkaDataSourceMetadata KM1 = KM("foo", ImmutableMap.of(0, 2L, 1, 3L)); - private static final KafkaDataSourceMetadata KM2 = KM("foo", ImmutableMap.of(0, 2L, 1, 4L, 2, 5L)); - private static final KafkaDataSourceMetadata KM3 = KM("foo", ImmutableMap.of(0, 2L, 2, 5L)); + private static final KafkaDataSourceMetadata KM0 = km("foo", ImmutableMap.of()); + private static final KafkaDataSourceMetadata KM1 = km("foo", ImmutableMap.of(0, 2L, 1, 3L)); + private static final KafkaDataSourceMetadata KM2 = km("foo", ImmutableMap.of(0, 2L, 1, 4L, 2, 5L)); + private static final KafkaDataSourceMetadata KM3 = km("foo", ImmutableMap.of(0, 2L, 2, 5L)); @Test public void testMatches() @@ -70,27 +70,27 @@ public class KafkaDataSourceMetadataTest public void testPlus() { Assert.assertEquals( - KM("foo", ImmutableMap.of(0, 2L, 1, 3L, 2, 5L)), + km("foo", ImmutableMap.of(0, 2L, 1, 3L, 2, 5L)), KM1.plus(KM3) ); Assert.assertEquals( - KM("foo", ImmutableMap.of(0, 2L, 1, 4L, 2, 5L)), + km("foo", ImmutableMap.of(0, 2L, 1, 4L, 2, 5L)), KM0.plus(KM2) ); Assert.assertEquals( - KM("foo", ImmutableMap.of(0, 2L, 1, 4L, 2, 5L)), + km("foo", ImmutableMap.of(0, 2L, 1, 4L, 2, 5L)), KM1.plus(KM2) ); Assert.assertEquals( - KM("foo", ImmutableMap.of(0, 2L, 1, 3L, 2, 5L)), + km("foo", ImmutableMap.of(0, 2L, 1, 3L, 2, 5L)), KM2.plus(KM1) ); Assert.assertEquals( - KM("foo", ImmutableMap.of(0, 2L, 1, 4L, 2, 5L)), + km("foo", ImmutableMap.of(0, 2L, 1, 4L, 2, 5L)), KM2.plus(KM2) ); } @@ -99,32 +99,32 @@ public class KafkaDataSourceMetadataTest public void testMinus() { Assert.assertEquals( - KM("foo", ImmutableMap.of(1, 3L)), + km("foo", ImmutableMap.of(1, 3L)), KM1.minus(KM3) ); Assert.assertEquals( - KM("foo", ImmutableMap.of()), + km("foo", ImmutableMap.of()), KM0.minus(KM2) ); Assert.assertEquals( - KM("foo", ImmutableMap.of()), + km("foo", ImmutableMap.of()), KM1.minus(KM2) ); Assert.assertEquals( - KM("foo", ImmutableMap.of(2, 5L)), + km("foo", ImmutableMap.of(2, 5L)), KM2.minus(KM1) ); Assert.assertEquals( - KM("foo", ImmutableMap.of()), + km("foo", ImmutableMap.of()), KM2.minus(KM2) ); } - private static KafkaDataSourceMetadata KM(String topic, Map offsets) + private static KafkaDataSourceMetadata km(String topic, Map offsets) { return new 
KafkaDataSourceMetadata(new SeekableStreamPartitions<>(topic, offsets)); } diff --git a/extensions-core/kafka-indexing-service/src/test/java/org/apache/druid/indexing/kafka/KafkaIndexTaskTest.java b/extensions-core/kafka-indexing-service/src/test/java/org/apache/druid/indexing/kafka/KafkaIndexTaskTest.java index 9d9d39a3671..81f3f20d3e9 100644 --- a/extensions-core/kafka-indexing-service/src/test/java/org/apache/druid/indexing/kafka/KafkaIndexTaskTest.java +++ b/extensions-core/kafka-indexing-service/src/test/java/org/apache/druid/indexing/kafka/KafkaIndexTaskTest.java @@ -139,7 +139,6 @@ import org.apache.druid.segment.loading.DataSegmentPusher; import org.apache.druid.segment.loading.LocalDataSegmentPusher; import org.apache.druid.segment.loading.LocalDataSegmentPusherConfig; import org.apache.druid.segment.loading.SegmentLoaderConfig; -import org.apache.druid.segment.loading.SegmentLoaderLocalCacheManager; import org.apache.druid.segment.loading.StorageLocationConfig; import org.apache.druid.segment.realtime.appenderator.AppenderatorImpl; import org.apache.druid.segment.realtime.plumber.SegmentHandoffNotifier; @@ -278,21 +277,21 @@ public class KafkaIndexTaskTest private static List> generateRecords(String topic) { return ImmutableList.of( - new ProducerRecord<>(topic, 0, null, JB("2008", "a", "y", "10", "20.0", "1.0")), - new ProducerRecord<>(topic, 0, null, JB("2009", "b", "y", "10", "20.0", "1.0")), - new ProducerRecord<>(topic, 0, null, JB("2010", "c", "y", "10", "20.0", "1.0")), - new ProducerRecord<>(topic, 0, null, JB("2011", "d", "y", "10", "20.0", "1.0")), - new ProducerRecord<>(topic, 0, null, JB("2011", "e", "y", "10", "20.0", "1.0")), - new ProducerRecord<>(topic, 0, null, JB("246140482-04-24T15:36:27.903Z", "x", "z", "10", "20.0", "1.0")), + new ProducerRecord<>(topic, 0, null, jb("2008", "a", "y", "10", "20.0", "1.0")), + new ProducerRecord<>(topic, 0, null, jb("2009", "b", "y", "10", "20.0", "1.0")), + new ProducerRecord<>(topic, 0, null, jb("2010", "c", "y", "10", "20.0", "1.0")), + new ProducerRecord<>(topic, 0, null, jb("2011", "d", "y", "10", "20.0", "1.0")), + new ProducerRecord<>(topic, 0, null, jb("2011", "e", "y", "10", "20.0", "1.0")), + new ProducerRecord<>(topic, 0, null, jb("246140482-04-24T15:36:27.903Z", "x", "z", "10", "20.0", "1.0")), new ProducerRecord<>(topic, 0, null, StringUtils.toUtf8("unparseable")), new ProducerRecord<>(topic, 0, null, StringUtils.toUtf8("unparseable2")), new ProducerRecord<>(topic, 0, null, null), - new ProducerRecord<>(topic, 0, null, JB("2013", "f", "y", "10", "20.0", "1.0")), - new ProducerRecord<>(topic, 0, null, JB("2049", "f", "y", "notanumber", "20.0", "1.0")), - new ProducerRecord<>(topic, 0, null, JB("2049", "f", "y", "10", "notanumber", "1.0")), - new ProducerRecord<>(topic, 0, null, JB("2049", "f", "y", "10", "20.0", "notanumber")), - new ProducerRecord<>(topic, 1, null, JB("2012", "g", "y", "10", "20.0", "1.0")), - new ProducerRecord<>(topic, 1, null, JB("2011", "h", "y", "10", "20.0", "1.0")) + new ProducerRecord<>(topic, 0, null, jb("2013", "f", "y", "10", "20.0", "1.0")), + new ProducerRecord<>(topic, 0, null, jb("2049", "f", "y", "notanumber", "20.0", "1.0")), + new ProducerRecord<>(topic, 0, null, jb("2049", "f", "y", "10", "notanumber", "1.0")), + new ProducerRecord<>(topic, 0, null, jb("2049", "f", "y", "10", "20.0", "notanumber")), + new ProducerRecord<>(topic, 1, null, jb("2012", "g", "y", "10", "20.0", "1.0")), + new ProducerRecord<>(topic, 1, null, jb("2011", "h", "y", "10", "20.0", "1.0")) ); } @@ -412,8 
+411,8 @@ public class KafkaIndexTaskTest Assert.assertEquals(0, task.getRunner().getRowIngestionMeters().getThrownAway()); // Check published metadata - SegmentDescriptor desc1 = SD(task, "2010/P1D", 0); - SegmentDescriptor desc2 = SD(task, "2011/P1D", 0); + SegmentDescriptor desc1 = sd(task, "2010/P1D", 0); + SegmentDescriptor desc2 = sd(task, "2011/P1D", 0); Assert.assertEquals(ImmutableSet.of(desc1, desc2), publishedDescriptors()); Assert.assertEquals( new KafkaDataSourceMetadata(new SeekableStreamPartitions<>(topic, ImmutableMap.of(0, 5L))), @@ -462,8 +461,8 @@ public class KafkaIndexTaskTest Assert.assertEquals(0, task.getRunner().getRowIngestionMeters().getThrownAway()); // Check published metadata - SegmentDescriptor desc1 = SD(task, "2010/P1D", 0); - SegmentDescriptor desc2 = SD(task, "2011/P1D", 0); + SegmentDescriptor desc1 = sd(task, "2010/P1D", 0); + SegmentDescriptor desc2 = sd(task, "2011/P1D", 0); Assert.assertEquals(ImmutableSet.of(desc1, desc2), publishedDescriptors()); Assert.assertEquals( new KafkaDataSourceMetadata(new SeekableStreamPartitions<>(topic, ImmutableMap.of(0, 5L))), @@ -571,13 +570,13 @@ public class KafkaIndexTaskTest Assert.assertEquals(1, task.getRunner().getRowIngestionMeters().getThrownAway()); // Check published metadata - SegmentDescriptor desc1 = SD(task, "2008/P1D", 0); - SegmentDescriptor desc2 = SD(task, "2009/P1D", 0); - SegmentDescriptor desc3 = SD(task, "2010/P1D", 0); - SegmentDescriptor desc4 = SD(task, "2011/P1D", 0); - SegmentDescriptor desc5 = SD(task, "2011/P1D", 1); - SegmentDescriptor desc6 = SD(task, "2012/P1D", 0); - SegmentDescriptor desc7 = SD(task, "2013/P1D", 0); + SegmentDescriptor desc1 = sd(task, "2008/P1D", 0); + SegmentDescriptor desc2 = sd(task, "2009/P1D", 0); + SegmentDescriptor desc3 = sd(task, "2010/P1D", 0); + SegmentDescriptor desc4 = sd(task, "2011/P1D", 0); + SegmentDescriptor desc5 = sd(task, "2011/P1D", 1); + SegmentDescriptor desc6 = sd(task, "2012/P1D", 0); + SegmentDescriptor desc7 = sd(task, "2013/P1D", 0); Assert.assertEquals(ImmutableSet.of(desc1, desc2, desc3, desc4, desc5, desc6, desc7), publishedDescriptors()); Assert.assertEquals( new KafkaDataSourceMetadata(new SeekableStreamPartitions<>(topic, ImmutableMap.of(0, 10L, 1, 2L))), @@ -697,6 +696,7 @@ public class KafkaIndexTaskTest } final Map nextOffsets = ImmutableMap.copyOf(task.getRunner().getCurrentOffsets()); + Assert.assertTrue(checkpoint2.getPartitionSequenceNumberMap().equals(nextOffsets)); task.getRunner().setEndOffsets(nextOffsets, false); @@ -728,15 +728,20 @@ public class KafkaIndexTaskTest Assert.assertEquals(8, task.getRunner().getRowIngestionMeters().getProcessed()); Assert.assertEquals(3, task.getRunner().getRowIngestionMeters().getUnparseable()); Assert.assertEquals(1, task.getRunner().getRowIngestionMeters().getThrownAway()); - + // Check published metadata - SegmentDescriptor desc1 = SD(task, "2008/P1D", 0); - SegmentDescriptor desc2 = SD(task, "2009/P1D", 0); - SegmentDescriptor desc3 = SD(task, "2010/P1D", 0); - SegmentDescriptor desc4 = SD(task, "2011/P1D", 0); - SegmentDescriptor desc5 = SD(task, "2011/P1D", 1); - SegmentDescriptor desc6 = SD(task, "2012/P1D", 0); - SegmentDescriptor desc7 = SD(task, "2013/P1D", 0); + SegmentDescriptor desc1 = sd(task, "2008/P1D", 0); + SegmentDescriptor desc2 = sd(task, "2009/P1D", 0); + SegmentDescriptor desc3 = sd(task, "2010/P1D", 0); + SegmentDescriptor desc4 = sd(task, "2011/P1D", 0); + SegmentDescriptor desc5 = sd(task, "2011/P1D", 1); + SegmentDescriptor desc6 = sd(task, "2012/P1D", 0); + 
SegmentDescriptor desc7 = sd(task, "2013/P1D", 0); + Assert.assertEquals(ImmutableSet.of(desc1, desc2, desc3, desc4, desc5, desc6, desc7), publishedDescriptors()); + Assert.assertEquals( + new KafkaDataSourceMetadata(new SeekableStreamPartitions<>(topic, ImmutableMap.of(0, 10L, 1, 2L))), + metadataStorageCoordinator.getDataSourceMetadata(DATA_SCHEMA.getDataSource())); + Assert.assertEquals(ImmutableSet.of(desc1, desc2, desc3, desc4, desc5, desc6, desc7), publishedDescriptors()); Assert.assertEquals( new KafkaDataSourceMetadata(new SeekableStreamPartitions<>(topic, ImmutableMap.of(0, 10L, 1, 2L))), @@ -845,8 +850,8 @@ public class KafkaIndexTaskTest Assert.assertEquals(0, task.getRunner().getRowIngestionMeters().getThrownAway()); // Check published metadata - SegmentDescriptor desc1 = SD(task, "2008/P1D", 0); - SegmentDescriptor desc2 = SD(task, "2009/P1D", 0); + SegmentDescriptor desc1 = sd(task, "2008/P1D", 0); + SegmentDescriptor desc2 = sd(task, "2009/P1D", 0); Assert.assertEquals(ImmutableSet.of(desc1, desc2), publishedDescriptors()); Assert.assertEquals( new KafkaDataSourceMetadata(new SeekableStreamPartitions<>(topic, ImmutableMap.of(0, 2L, 1, 0L))), @@ -866,13 +871,13 @@ public class KafkaIndexTaskTest } List> records = ImmutableList.of( - new ProducerRecord<>(topic, 0, null, JB("2008", "a", "y", "10", "20.0", "1.0")), - new ProducerRecord<>(topic, 0, null, JB("2009", "b", "y", "10", "20.0", "1.0")), - new ProducerRecord<>(topic, 0, null, JB("2010", "c", "y", "10", "20.0", "1.0")), - new ProducerRecord<>(topic, 0, null, JB("2011", "d", "y", "10", "20.0", "1.0")), - new ProducerRecord<>(topic, 0, null, JB("2011", "D", "y", "10", "20.0", "1.0")), - new ProducerRecord<>(topic, 0, null, JB("2012", "e", "y", "10", "20.0", "1.0")), - new ProducerRecord<>(topic, 0, null, JB("2009", "B", "y", "10", "20.0", "1.0")) + new ProducerRecord<>(topic, 0, null, jb("2008", "a", "y", "10", "20.0", "1.0")), + new ProducerRecord<>(topic, 0, null, jb("2009", "b", "y", "10", "20.0", "1.0")), + new ProducerRecord<>(topic, 0, null, jb("2010", "c", "y", "10", "20.0", "1.0")), + new ProducerRecord<>(topic, 0, null, jb("2011", "d", "y", "10", "20.0", "1.0")), + new ProducerRecord<>(topic, 0, null, jb("2011", "D", "y", "10", "20.0", "1.0")), + new ProducerRecord<>(topic, 0, null, jb("2012", "e", "y", "10", "20.0", "1.0")), + new ProducerRecord<>(topic, 0, null, jb("2009", "B", "y", "10", "20.0", "1.0")) ); final String baseSequenceName = "sequence0"; @@ -974,8 +979,8 @@ public class KafkaIndexTaskTest Assert.assertEquals(2, task.getRunner().getRowIngestionMeters().getThrownAway()); // Check published metadata - SegmentDescriptor desc1 = SD(task, "2010/P1D", 0); - SegmentDescriptor desc2 = SD(task, "2011/P1D", 0); + SegmentDescriptor desc1 = sd(task, "2010/P1D", 0); + SegmentDescriptor desc2 = sd(task, "2011/P1D", 0); Assert.assertEquals(ImmutableSet.of(desc1, desc2), publishedDescriptors()); Assert.assertEquals( new KafkaDataSourceMetadata(new SeekableStreamPartitions<>(topic, ImmutableMap.of(0, 5L))), @@ -1024,9 +1029,9 @@ public class KafkaIndexTaskTest Assert.assertEquals(2, task.getRunner().getRowIngestionMeters().getThrownAway()); // Check published metadata - SegmentDescriptor desc1 = SD(task, "2008/P1D", 0); - SegmentDescriptor desc2 = SD(task, "2009/P1D", 0); - SegmentDescriptor desc3 = SD(task, "2010/P1D", 0); + SegmentDescriptor desc1 = sd(task, "2008/P1D", 0); + SegmentDescriptor desc2 = sd(task, "2009/P1D", 0); + SegmentDescriptor desc3 = sd(task, "2010/P1D", 0); 
Assert.assertEquals(ImmutableSet.of(desc1, desc2, desc3), publishedDescriptors()); Assert.assertEquals( new KafkaDataSourceMetadata(new SeekableStreamPartitions<>(topic, ImmutableMap.of(0, 5L))), @@ -1084,7 +1089,7 @@ public class KafkaIndexTaskTest Assert.assertEquals(4, task.getRunner().getRowIngestionMeters().getThrownAway()); // Check published metadata - SegmentDescriptor desc1 = SD(task, "2009/P1D", 0); + SegmentDescriptor desc1 = sd(task, "2009/P1D", 0); Assert.assertEquals(ImmutableSet.of(desc1), publishedDescriptors()); Assert.assertEquals( new KafkaDataSourceMetadata(new SeekableStreamPartitions<>(topic, ImmutableMap.of(0, 5L))), @@ -1165,8 +1170,8 @@ public class KafkaIndexTaskTest Assert.assertEquals(0, task.getRunner().getRowIngestionMeters().getThrownAway()); // Check published metadata - SegmentDescriptor desc1 = SD(task, "2010/P1D", 0); - SegmentDescriptor desc2 = SD(task, "2011/P1D", 0); + SegmentDescriptor desc1 = sd(task, "2010/P1D", 0); + SegmentDescriptor desc2 = sd(task, "2011/P1D", 0); Assert.assertEquals(ImmutableSet.of(desc1, desc2), publishedDescriptors()); Assert.assertEquals( new KafkaDataSourceMetadata(new SeekableStreamPartitions<>(topic, ImmutableMap.of(0, 5L))), @@ -1213,8 +1218,8 @@ public class KafkaIndexTaskTest Assert.assertEquals(0, task.getRunner().getRowIngestionMeters().getThrownAway()); // Check published metadata - SegmentDescriptor desc1 = SD(task, "2010/P1D", 0); - SegmentDescriptor desc2 = SD(task, "2011/P1D", 0); + SegmentDescriptor desc1 = sd(task, "2010/P1D", 0); + SegmentDescriptor desc2 = sd(task, "2011/P1D", 0); Assert.assertEquals(ImmutableSet.of(desc1, desc2), publishedDescriptors()); Assert.assertEquals( new KafkaDataSourceMetadata(new SeekableStreamPartitions<>(topic, ImmutableMap.of(0, 5L))), @@ -1308,10 +1313,10 @@ public class KafkaIndexTaskTest Assert.assertEquals(1, task.getRunner().getRowIngestionMeters().getThrownAway()); // Check published metadata - SegmentDescriptor desc1 = SD(task, "2010/P1D", 0); - SegmentDescriptor desc2 = SD(task, "2011/P1D", 0); - SegmentDescriptor desc3 = SD(task, "2013/P1D", 0); - SegmentDescriptor desc4 = SD(task, "2049/P1D", 0); + SegmentDescriptor desc1 = sd(task, "2010/P1D", 0); + SegmentDescriptor desc2 = sd(task, "2011/P1D", 0); + SegmentDescriptor desc3 = sd(task, "2013/P1D", 0); + SegmentDescriptor desc4 = sd(task, "2049/P1D", 0); Assert.assertEquals(ImmutableSet.of(desc1, desc2, desc3, desc4), publishedDescriptors()); Assert.assertEquals( new KafkaDataSourceMetadata(new SeekableStreamPartitions<>(topic, ImmutableMap.of(0, 13L))), @@ -1464,8 +1469,8 @@ public class KafkaIndexTaskTest Assert.assertEquals(0, task2.getRunner().getRowIngestionMeters().getThrownAway()); // Check published segments & metadata - SegmentDescriptor desc1 = SD(task1, "2010/P1D", 0); - SegmentDescriptor desc2 = SD(task1, "2011/P1D", 0); + SegmentDescriptor desc1 = sd(task1, "2010/P1D", 0); + SegmentDescriptor desc2 = sd(task1, "2011/P1D", 0); Assert.assertEquals(ImmutableSet.of(desc1, desc2), publishedDescriptors()); Assert.assertEquals( new KafkaDataSourceMetadata(new SeekableStreamPartitions<>(topic, ImmutableMap.of(0, 5L))), @@ -1529,8 +1534,8 @@ public class KafkaIndexTaskTest Assert.assertEquals(1, task2.getRunner().getRowIngestionMeters().getThrownAway()); // Check published segments & metadata, should all be from the first task - SegmentDescriptor desc1 = SD(task1, "2010/P1D", 0); - SegmentDescriptor desc2 = SD(task1, "2011/P1D", 0); + SegmentDescriptor desc1 = sd(task1, "2010/P1D", 0); + SegmentDescriptor desc2 = 
sd(task1, "2011/P1D", 0); Assert.assertEquals(ImmutableSet.of(desc1, desc2), publishedDescriptors()); Assert.assertEquals( new KafkaDataSourceMetadata(new SeekableStreamPartitions<>(topic, ImmutableMap.of(0, 5L))), @@ -1582,8 +1587,8 @@ public class KafkaIndexTaskTest Assert.assertEquals(TaskState.SUCCESS, future1.get().getStatusCode()); // Check published segments & metadata - SegmentDescriptor desc1 = SD(task1, "2010/P1D", 0); - SegmentDescriptor desc2 = SD(task1, "2011/P1D", 0); + SegmentDescriptor desc1 = sd(task1, "2010/P1D", 0); + SegmentDescriptor desc2 = sd(task1, "2011/P1D", 0); Assert.assertEquals(ImmutableSet.of(desc1, desc2), publishedDescriptors()); Assert.assertNull(metadataStorageCoordinator.getDataSourceMetadata(DATA_SCHEMA.getDataSource())); @@ -1600,8 +1605,8 @@ public class KafkaIndexTaskTest Assert.assertEquals(1, task2.getRunner().getRowIngestionMeters().getThrownAway()); // Check published segments & metadata - SegmentDescriptor desc3 = SD(task2, "2011/P1D", 1); - SegmentDescriptor desc4 = SD(task2, "2013/P1D", 0); + SegmentDescriptor desc3 = sd(task2, "2011/P1D", 1); + SegmentDescriptor desc4 = sd(task2, "2013/P1D", 0); Assert.assertEquals(ImmutableSet.of(desc1, desc2, desc3, desc4), publishedDescriptors()); Assert.assertNull(metadataStorageCoordinator.getDataSourceMetadata(DATA_SCHEMA.getDataSource())); @@ -1644,11 +1649,11 @@ public class KafkaIndexTaskTest Assert.assertEquals(0, task.getRunner().getRowIngestionMeters().getThrownAway()); // Check published segments & metadata - SegmentDescriptor desc1 = SD(task, "2010/P1D", 0); - SegmentDescriptor desc2 = SD(task, "2011/P1D", 0); + SegmentDescriptor desc1 = sd(task, "2010/P1D", 0); + SegmentDescriptor desc2 = sd(task, "2011/P1D", 0); // desc3 will not be created in KafkaIndexTask (0.12.x) as it does not create per Kafka partition Druid segments - SegmentDescriptor desc3 = SD(task, "2011/P1D", 1); - SegmentDescriptor desc4 = SD(task, "2012/P1D", 0); + SegmentDescriptor desc3 = sd(task, "2011/P1D", 1); + SegmentDescriptor desc4 = sd(task, "2012/P1D", 0); Assert.assertEquals(isIncrementalHandoffSupported ? 
ImmutableSet.of(desc1, desc2, desc4) : ImmutableSet.of(desc1, desc2, desc3, desc4), publishedDescriptors()); @@ -1723,9 +1728,9 @@ public class KafkaIndexTaskTest Assert.assertEquals(0, task2.getRunner().getRowIngestionMeters().getThrownAway()); // Check published segments & metadata - SegmentDescriptor desc1 = SD(task1, "2010/P1D", 0); - SegmentDescriptor desc2 = SD(task1, "2011/P1D", 0); - SegmentDescriptor desc3 = SD(task2, "2012/P1D", 0); + SegmentDescriptor desc1 = sd(task1, "2010/P1D", 0); + SegmentDescriptor desc2 = sd(task1, "2011/P1D", 0); + SegmentDescriptor desc3 = sd(task2, "2012/P1D", 0); Assert.assertEquals(ImmutableSet.of(desc1, desc2, desc3), publishedDescriptors()); Assert.assertEquals( new KafkaDataSourceMetadata(new SeekableStreamPartitions<>(topic, ImmutableMap.of(0, 5L, 1, 1L))), @@ -1821,8 +1826,8 @@ public class KafkaIndexTaskTest Assert.assertEquals(0, task2.getRunner().getRowIngestionMeters().getThrownAway()); // Check published segments & metadata - SegmentDescriptor desc1 = SD(task1, "2010/P1D", 0); - SegmentDescriptor desc2 = SD(task1, "2011/P1D", 0); + SegmentDescriptor desc1 = sd(task1, "2010/P1D", 0); + SegmentDescriptor desc2 = sd(task1, "2011/P1D", 0); Assert.assertEquals(ImmutableSet.of(desc1, desc2), publishedDescriptors()); Assert.assertEquals( new KafkaDataSourceMetadata(new SeekableStreamPartitions<>(topic, ImmutableMap.of(0, 6L))), @@ -1910,8 +1915,8 @@ public class KafkaIndexTaskTest Assert.assertEquals(0, task.getRunner().getRowIngestionMeters().getThrownAway()); // Check published metadata - SegmentDescriptor desc1 = SD(task, "2010/P1D", 0); - SegmentDescriptor desc2 = SD(task, "2011/P1D", 0); + SegmentDescriptor desc1 = sd(task, "2010/P1D", 0); + SegmentDescriptor desc2 = sd(task, "2011/P1D", 0); Assert.assertEquals(ImmutableSet.of(desc1, desc2), publishedDescriptors()); Assert.assertEquals( new KafkaDataSourceMetadata(new SeekableStreamPartitions<>(topic, ImmutableMap.of(0, 6L))), @@ -2038,8 +2043,8 @@ public class KafkaIndexTaskTest Assert.assertEquals(0, task.getRunner().getRowIngestionMeters().getThrownAway()); // Check published metadata - SegmentDescriptor desc1 = SD(task, "2010/P1D", 0); - SegmentDescriptor desc2 = SD(task, "2011/P1D", 0); + SegmentDescriptor desc1 = sd(task, "2010/P1D", 0); + SegmentDescriptor desc2 = sd(task, "2011/P1D", 0); Assert.assertEquals(ImmutableSet.of(desc1, desc2), publishedDescriptors()); Assert.assertEquals( new KafkaDataSourceMetadata(new SeekableStreamPartitions<>(topic, ImmutableMap.of(0, 5L))), @@ -2179,10 +2184,10 @@ public class KafkaIndexTaskTest Assert.assertEquals(1, task.getRunner().getRowIngestionMeters().getThrownAway()); // Check published metadata - SegmentDescriptor desc1 = SD(task, "2008/P1D", 0); - SegmentDescriptor desc2 = SD(task, "2009/P1D", 0); - SegmentDescriptor desc3 = SD(task, "2013/P1D", 0); - SegmentDescriptor desc4 = SD(task, "2049/P1D", 0); + SegmentDescriptor desc1 = sd(task, "2008/P1D", 0); + SegmentDescriptor desc2 = sd(task, "2009/P1D", 0); + SegmentDescriptor desc3 = sd(task, "2013/P1D", 0); + SegmentDescriptor desc4 = sd(task, "2049/P1D", 0); Assert.assertEquals(ImmutableSet.of(desc1, desc2, desc3, desc4), publishedDescriptors()); Assert.assertEquals( new KafkaDataSourceMetadata(new SeekableStreamPartitions<>(topic, ImmutableMap.of(0, 13L))), @@ -2560,9 +2565,7 @@ public class KafkaIndexTaskTest this::makeTimeseriesAndScanConglomerate, Execs.directExecutor(), // queryExecutorService EasyMock.createMock(MonitorScheduler.class), - new SegmentLoaderFactory( - new 
SegmentLoaderLocalCacheManager(null, segmentLoaderConfig, testUtils.getTestObjectMapper()) - ), + new SegmentLoaderFactory(null, testUtils.getTestObjectMapper()), testUtils.getTestObjectMapper(), testUtils.getTestIndexIO(), MapCache.create(1024), @@ -2673,7 +2676,7 @@ public class KafkaIndexTaskTest return results.isEmpty() ? 0L : DimensionHandlerUtils.nullToZero(results.get(0).getValue().getLongMetric("rows")); } - private static byte[] JB(String timestamp, String dim1, String dim2, String dimLong, String dimFloat, String met1) + private static byte[] jb(String timestamp, String dim1, String dim2, String dimLong, String dimFloat, String met1) { try { return new ObjectMapper().writeValueAsBytes( @@ -2692,7 +2695,7 @@ public class KafkaIndexTaskTest } } - private SegmentDescriptor SD(final Task task, final String intervalString, final int partitionNum) + private SegmentDescriptor sd(final Task task, final String intervalString, final int partitionNum) { final Interval interval = Intervals.of(intervalString); return new SegmentDescriptor(interval, getLock(task, interval).getVersion(), partitionNum); diff --git a/extensions-core/kafka-indexing-service/src/test/java/org/apache/druid/indexing/kafka/KafkaRecordSupplierTest.java b/extensions-core/kafka-indexing-service/src/test/java/org/apache/druid/indexing/kafka/KafkaRecordSupplierTest.java index f944bf04610..a5e75c917c6 100644 --- a/extensions-core/kafka-indexing-service/src/test/java/org/apache/druid/indexing/kafka/KafkaRecordSupplierTest.java +++ b/extensions-core/kafka-indexing-service/src/test/java/org/apache/druid/indexing/kafka/KafkaRecordSupplierTest.java @@ -66,25 +66,25 @@ public class KafkaRecordSupplierTest private static List> generateRecords(String topic) { return ImmutableList.of( - new ProducerRecord<>(topic, 0, null, JB("2008", "a", "y", "10", "20.0", "1.0")), - new ProducerRecord<>(topic, 0, null, JB("2009", "b", "y", "10", "20.0", "1.0")), - new ProducerRecord<>(topic, 0, null, JB("2010", "c", "y", "10", "20.0", "1.0")), - new ProducerRecord<>(topic, 0, null, JB("2011", "d", "y", "10", "20.0", "1.0")), - new ProducerRecord<>(topic, 0, null, JB("2011", "e", "y", "10", "20.0", "1.0")), - new ProducerRecord<>(topic, 0, null, JB("246140482-04-24T15:36:27.903Z", "x", "z", "10", "20.0", "1.0")), + new ProducerRecord<>(topic, 0, null, jb("2008", "a", "y", "10", "20.0", "1.0")), + new ProducerRecord<>(topic, 0, null, jb("2009", "b", "y", "10", "20.0", "1.0")), + new ProducerRecord<>(topic, 0, null, jb("2010", "c", "y", "10", "20.0", "1.0")), + new ProducerRecord<>(topic, 0, null, jb("2011", "d", "y", "10", "20.0", "1.0")), + new ProducerRecord<>(topic, 0, null, jb("2011", "e", "y", "10", "20.0", "1.0")), + new ProducerRecord<>(topic, 0, null, jb("246140482-04-24T15:36:27.903Z", "x", "z", "10", "20.0", "1.0")), new ProducerRecord<>(topic, 0, null, StringUtils.toUtf8("unparseable")), new ProducerRecord<>(topic, 0, null, StringUtils.toUtf8("unparseable2")), new ProducerRecord<>(topic, 0, null, null), - new ProducerRecord<>(topic, 0, null, JB("2013", "f", "y", "10", "20.0", "1.0")), - new ProducerRecord<>(topic, 0, null, JB("2049", "f", "y", "notanumber", "20.0", "1.0")), - new ProducerRecord<>(topic, 1, null, JB("2049", "f", "y", "10", "notanumber", "1.0")), - new ProducerRecord<>(topic, 1, null, JB("2049", "f", "y", "10", "20.0", "notanumber")), - new ProducerRecord<>(topic, 1, null, JB("2012", "g", "y", "10", "20.0", "1.0")), - new ProducerRecord<>(topic, 1, null, JB("2011", "h", "y", "10", "20.0", "1.0")) + new 
ProducerRecord<>(topic, 0, null, jb("2013", "f", "y", "10", "20.0", "1.0")), + new ProducerRecord<>(topic, 0, null, jb("2049", "f", "y", "notanumber", "20.0", "1.0")), + new ProducerRecord<>(topic, 1, null, jb("2049", "f", "y", "10", "notanumber", "1.0")), + new ProducerRecord<>(topic, 1, null, jb("2049", "f", "y", "10", "20.0", "notanumber")), + new ProducerRecord<>(topic, 1, null, jb("2012", "g", "y", "10", "20.0", "1.0")), + new ProducerRecord<>(topic, 1, null, jb("2011", "h", "y", "10", "20.0", "1.0")) ); } - private static byte[] JB(String timestamp, String dim1, String dim2, String dimLong, String dimFloat, String met1) + private static byte[] jb(String timestamp, String dim1, String dim2, String dimLong, String dimFloat, String met1) { try { return new ObjectMapper().writeValueAsBytes( diff --git a/extensions-core/kinesis-indexing-service/src/test/java/org/apache/druid/indexing/kinesis/KinesisDataSourceMetadataTest.java b/extensions-core/kinesis-indexing-service/src/test/java/org/apache/druid/indexing/kinesis/KinesisDataSourceMetadataTest.java index f1e3b0fca65..2c5bce1744b 100644 --- a/extensions-core/kinesis-indexing-service/src/test/java/org/apache/druid/indexing/kinesis/KinesisDataSourceMetadataTest.java +++ b/extensions-core/kinesis-indexing-service/src/test/java/org/apache/druid/indexing/kinesis/KinesisDataSourceMetadataTest.java @@ -29,10 +29,10 @@ import java.util.Map; public class KinesisDataSourceMetadataTest { - private static final KinesisDataSourceMetadata KM0 = KM("foo", ImmutableMap.of()); - private static final KinesisDataSourceMetadata KM1 = KM("foo", ImmutableMap.of("0", "2L", "1", "3L")); - private static final KinesisDataSourceMetadata KM2 = KM("foo", ImmutableMap.of("0", "2L", "1", "4L", "2", "5L")); - private static final KinesisDataSourceMetadata KM3 = KM("foo", ImmutableMap.of("0", "2L", "2", "5L")); + private static final KinesisDataSourceMetadata KM0 = km("foo", ImmutableMap.of()); + private static final KinesisDataSourceMetadata KM1 = km("foo", ImmutableMap.of("0", "2L", "1", "3L")); + private static final KinesisDataSourceMetadata KM2 = km("foo", ImmutableMap.of("0", "2L", "1", "4L", "2", "5L")); + private static final KinesisDataSourceMetadata KM3 = km("foo", ImmutableMap.of("0", "2L", "2", "5L")); @Test public void testMatches() @@ -71,27 +71,27 @@ public class KinesisDataSourceMetadataTest public void testPlus() { Assert.assertEquals( - KM("foo", ImmutableMap.of("0", "2L", "1", "3L", "2", "5L")), + km("foo", ImmutableMap.of("0", "2L", "1", "3L", "2", "5L")), KM1.plus(KM3) ); Assert.assertEquals( - KM("foo", ImmutableMap.of("0", "2L", "1", "4L", "2", "5L")), + km("foo", ImmutableMap.of("0", "2L", "1", "4L", "2", "5L")), KM0.plus(KM2) ); Assert.assertEquals( - KM("foo", ImmutableMap.of("0", "2L", "1", "4L", "2", "5L")), + km("foo", ImmutableMap.of("0", "2L", "1", "4L", "2", "5L")), KM1.plus(KM2) ); Assert.assertEquals( - KM("foo", ImmutableMap.of("0", "2L", "1", "3L", "2", "5L")), + km("foo", ImmutableMap.of("0", "2L", "1", "3L", "2", "5L")), KM2.plus(KM1) ); Assert.assertEquals( - KM("foo", ImmutableMap.of("0", "2L", "1", "4L", "2", "5L")), + km("foo", ImmutableMap.of("0", "2L", "1", "4L", "2", "5L")), KM2.plus(KM2) ); } @@ -100,32 +100,32 @@ public class KinesisDataSourceMetadataTest public void testMinus() { Assert.assertEquals( - KM("foo", ImmutableMap.of("1", "3L")), + km("foo", ImmutableMap.of("1", "3L")), KM1.minus(KM3) ); Assert.assertEquals( - KM("foo", ImmutableMap.of()), + km("foo", ImmutableMap.of()), KM0.minus(KM2) ); Assert.assertEquals( - 
KM("foo", ImmutableMap.of()), + km("foo", ImmutableMap.of()), KM1.minus(KM2) ); Assert.assertEquals( - KM("foo", ImmutableMap.of("2", "5L")), + km("foo", ImmutableMap.of("2", "5L")), KM2.minus(KM1) ); Assert.assertEquals( - KM("foo", ImmutableMap.of()), + km("foo", ImmutableMap.of()), KM2.minus(KM2) ); } - private static KinesisDataSourceMetadata KM(String stream, Map sequences) + private static KinesisDataSourceMetadata km(String stream, Map sequences) { return new KinesisDataSourceMetadata(new SeekableStreamPartitions<>(stream, sequences)); } diff --git a/extensions-core/kinesis-indexing-service/src/test/java/org/apache/druid/indexing/kinesis/KinesisIndexTaskTest.java b/extensions-core/kinesis-indexing-service/src/test/java/org/apache/druid/indexing/kinesis/KinesisIndexTaskTest.java index f0ef143e5ee..78ce481980b 100644 --- a/extensions-core/kinesis-indexing-service/src/test/java/org/apache/druid/indexing/kinesis/KinesisIndexTaskTest.java +++ b/extensions-core/kinesis-indexing-service/src/test/java/org/apache/druid/indexing/kinesis/KinesisIndexTaskTest.java @@ -135,7 +135,6 @@ import org.apache.druid.segment.loading.DataSegmentPusher; import org.apache.druid.segment.loading.LocalDataSegmentPusher; import org.apache.druid.segment.loading.LocalDataSegmentPusherConfig; import org.apache.druid.segment.loading.SegmentLoaderConfig; -import org.apache.druid.segment.loading.SegmentLoaderLocalCacheManager; import org.apache.druid.segment.loading.StorageLocationConfig; import org.apache.druid.segment.realtime.appenderator.AppenderatorImpl; import org.apache.druid.segment.realtime.firehose.ChatHandlerProvider; @@ -199,26 +198,26 @@ public class KinesisIndexTaskTest extends EasyMockSupport private static String shardId0 = "0"; private static KinesisRecordSupplier recordSupplier; private static List> records = ImmutableList.of( - new OrderedPartitionableRecord<>(stream, "1", "0", JB("2008", "a", "y", "10", "20.0", "1.0")), - new OrderedPartitionableRecord<>(stream, "1", "1", JB("2009", "b", "y", "10", "20.0", "1.0")), - new OrderedPartitionableRecord<>(stream, "1", "2", JB("2010", "c", "y", "10", "20.0", "1.0")), - new OrderedPartitionableRecord<>(stream, "1", "3", JB("2011", "d", "y", "10", "20.0", "1.0")), - new OrderedPartitionableRecord<>(stream, "1", "4", JB("2011", "e", "y", "10", "20.0", "1.0")), + new OrderedPartitionableRecord<>(stream, "1", "0", jb("2008", "a", "y", "10", "20.0", "1.0")), + new OrderedPartitionableRecord<>(stream, "1", "1", jb("2009", "b", "y", "10", "20.0", "1.0")), + new OrderedPartitionableRecord<>(stream, "1", "2", jb("2010", "c", "y", "10", "20.0", "1.0")), + new OrderedPartitionableRecord<>(stream, "1", "3", jb("2011", "d", "y", "10", "20.0", "1.0")), + new OrderedPartitionableRecord<>(stream, "1", "4", jb("2011", "e", "y", "10", "20.0", "1.0")), new OrderedPartitionableRecord<>( stream, "1", "5", - JB("246140482-04-24T15:36:27.903Z", "x", "z", "10", "20.0", "1.0") + jb("246140482-04-24T15:36:27.903Z", "x", "z", "10", "20.0", "1.0") ), new OrderedPartitionableRecord<>(stream, "1", "6", Collections.singletonList(StringUtils.toUtf8("unparseable"))), new OrderedPartitionableRecord<>(stream, "1", "7", Collections.singletonList(StringUtils.toUtf8("unparseable2"))), new OrderedPartitionableRecord<>(stream, "1", "8", Collections.singletonList(StringUtils.toUtf8("{}"))), - new OrderedPartitionableRecord<>(stream, "1", "9", JB("2013", "f", "y", "10", "20.0", "1.0")), - new OrderedPartitionableRecord<>(stream, "1", "10", JB("2049", "f", "y", "notanumber", "20.0", "1.0")), - new 
OrderedPartitionableRecord<>(stream, "1", "11", JB("2049", "f", "y", "10", "notanumber", "1.0")), - new OrderedPartitionableRecord<>(stream, "1", "12", JB("2049", "f", "y", "10", "20.0", "notanumber")), - new OrderedPartitionableRecord<>(stream, "0", "0", JB("2012", "g", "y", "10", "20.0", "1.0")), - new OrderedPartitionableRecord<>(stream, "0", "1", JB("2011", "h", "y", "10", "20.0", "1.0")) + new OrderedPartitionableRecord<>(stream, "1", "9", jb("2013", "f", "y", "10", "20.0", "1.0")), + new OrderedPartitionableRecord<>(stream, "1", "10", jb("2049", "f", "y", "notanumber", "20.0", "1.0")), + new OrderedPartitionableRecord<>(stream, "1", "11", jb("2049", "f", "y", "10", "notanumber", "1.0")), + new OrderedPartitionableRecord<>(stream, "1", "12", jb("2049", "f", "y", "10", "20.0", "notanumber")), + new OrderedPartitionableRecord<>(stream, "0", "0", jb("2012", "g", "y", "10", "20.0", "1.0")), + new OrderedPartitionableRecord<>(stream, "0", "1", jb("2011", "h", "y", "10", "20.0", "1.0")) ); private static ServiceEmitter emitter; @@ -405,8 +404,8 @@ public class KinesisIndexTaskTest extends EasyMockSupport Assert.assertEquals(0, task.getRunner().getRowIngestionMeters().getThrownAway()); // Check published metadata - SegmentDescriptor desc1 = SD(task, "2010/P1D", 0); - SegmentDescriptor desc2 = SD(task, "2011/P1D", 0); + SegmentDescriptor desc1 = sd(task, "2010/P1D", 0); + SegmentDescriptor desc2 = sd(task, "2011/P1D", 0); Assert.assertEquals(ImmutableSet.of(desc1, desc2), publishedDescriptors()); Assert.assertEquals( new KinesisDataSourceMetadata(new SeekableStreamPartitions<>( @@ -485,8 +484,8 @@ public class KinesisIndexTaskTest extends EasyMockSupport Assert.assertEquals(0, task.getRunner().getRowIngestionMeters().getThrownAway()); // Check published metadata - SegmentDescriptor desc1 = SD(task, "2011/P1D", 0); - SegmentDescriptor desc2 = SD(task, "2012/P1D", 0); + SegmentDescriptor desc1 = sd(task, "2011/P1D", 0); + SegmentDescriptor desc2 = sd(task, "2012/P1D", 0); Assert.assertEquals(ImmutableSet.of(desc1, desc2), publishedDescriptors()); Assert.assertEquals( new KinesisDataSourceMetadata( @@ -610,13 +609,13 @@ public class KinesisIndexTaskTest extends EasyMockSupport Assert.assertEquals(0, task.getRunner().getRowIngestionMeters().getThrownAway()); // Check published metadata - SegmentDescriptor desc1 = SD(task, "2008/P1D", 0); - SegmentDescriptor desc2 = SD(task, "2009/P1D", 0); - SegmentDescriptor desc3 = SD(task, "2010/P1D", 0); - SegmentDescriptor desc4 = SD(task, "2011/P1D", 0); - SegmentDescriptor desc5 = SD(task, "2011/P1D", 1); - SegmentDescriptor desc6 = SD(task, "2012/P1D", 0); - SegmentDescriptor desc7 = SD(task, "2013/P1D", 0); + SegmentDescriptor desc1 = sd(task, "2008/P1D", 0); + SegmentDescriptor desc2 = sd(task, "2009/P1D", 0); + SegmentDescriptor desc3 = sd(task, "2010/P1D", 0); + SegmentDescriptor desc4 = sd(task, "2011/P1D", 0); + SegmentDescriptor desc5 = sd(task, "2011/P1D", 1); + SegmentDescriptor desc6 = sd(task, "2012/P1D", 0); + SegmentDescriptor desc7 = sd(task, "2013/P1D", 0); Assert.assertEquals(ImmutableSet.of(desc1, desc2, desc3, desc4, desc5, desc6, desc7), publishedDescriptors()); Assert.assertEquals( new KinesisDataSourceMetadata(new SeekableStreamPartitions<>(stream, ImmutableMap.of( @@ -771,12 +770,12 @@ public class KinesisIndexTaskTest extends EasyMockSupport Assert.assertEquals(0, task.getRunner().getRowIngestionMeters().getThrownAway()); // Check published metadata - SegmentDescriptor desc1 = SD(task, "2008/P1D", 0); - SegmentDescriptor desc2 = 
SD(task, "2009/P1D", 0); - SegmentDescriptor desc3 = SD(task, "2010/P1D", 0); - SegmentDescriptor desc4 = SD(task, "2011/P1D", 0); - SegmentDescriptor desc5 = SD(task, "2049/P1D", 0); - SegmentDescriptor desc7 = SD(task, "2013/P1D", 0); + SegmentDescriptor desc1 = sd(task, "2008/P1D", 0); + SegmentDescriptor desc2 = sd(task, "2009/P1D", 0); + SegmentDescriptor desc3 = sd(task, "2010/P1D", 0); + SegmentDescriptor desc4 = sd(task, "2011/P1D", 0); + SegmentDescriptor desc5 = sd(task, "2049/P1D", 0); + SegmentDescriptor desc7 = sd(task, "2013/P1D", 0); Assert.assertEquals(ImmutableSet.of(desc1, desc2, desc3, desc4, desc5, desc7), publishedDescriptors()); Assert.assertEquals( new KinesisDataSourceMetadata(new SeekableStreamPartitions<>(stream, ImmutableMap.of( @@ -858,8 +857,8 @@ public class KinesisIndexTaskTest extends EasyMockSupport Assert.assertEquals(2, task.getRunner().getRowIngestionMeters().getThrownAway()); // Check published metadata - SegmentDescriptor desc1 = SD(task, "2010/P1D", 0); - SegmentDescriptor desc2 = SD(task, "2011/P1D", 0); + SegmentDescriptor desc1 = sd(task, "2010/P1D", 0); + SegmentDescriptor desc2 = sd(task, "2011/P1D", 0); Assert.assertEquals(ImmutableSet.of(desc1, desc2), publishedDescriptors()); Assert.assertEquals( new KinesisDataSourceMetadata( @@ -941,9 +940,9 @@ public class KinesisIndexTaskTest extends EasyMockSupport Assert.assertEquals(2, task.getRunner().getRowIngestionMeters().getThrownAway()); // Check published metadata - SegmentDescriptor desc1 = SD(task, "2008/P1D", 0); - SegmentDescriptor desc2 = SD(task, "2009/P1D", 0); - SegmentDescriptor desc3 = SD(task, "2010/P1D", 0); + SegmentDescriptor desc1 = sd(task, "2008/P1D", 0); + SegmentDescriptor desc2 = sd(task, "2009/P1D", 0); + SegmentDescriptor desc3 = sd(task, "2010/P1D", 0); Assert.assertEquals(ImmutableSet.of(desc1, desc2, desc3), publishedDescriptors()); Assert.assertEquals( new KinesisDataSourceMetadata( @@ -1034,7 +1033,7 @@ public class KinesisIndexTaskTest extends EasyMockSupport Assert.assertEquals(4, task.getRunner().getRowIngestionMeters().getThrownAway()); // Check published metadata - SegmentDescriptor desc1 = SD(task, "2009/P1D", 0); + SegmentDescriptor desc1 = sd(task, "2009/P1D", 0); Assert.assertEquals(ImmutableSet.of(desc1), publishedDescriptors()); Assert.assertEquals( new KinesisDataSourceMetadata( @@ -1172,8 +1171,8 @@ public class KinesisIndexTaskTest extends EasyMockSupport Assert.assertEquals(0, task.getRunner().getRowIngestionMeters().getThrownAway()); // Check published metadata - SegmentDescriptor desc1 = SD(task, "2010/P1D", 0); - SegmentDescriptor desc2 = SD(task, "2011/P1D", 0); + SegmentDescriptor desc1 = sd(task, "2010/P1D", 0); + SegmentDescriptor desc2 = sd(task, "2011/P1D", 0); Assert.assertEquals(ImmutableSet.of(desc1, desc2), publishedDescriptors()); Assert.assertEquals( new KinesisDataSourceMetadata(new SeekableStreamPartitions<>(stream, ImmutableMap.of( @@ -1249,8 +1248,8 @@ public class KinesisIndexTaskTest extends EasyMockSupport Assert.assertEquals(0, task.getRunner().getRowIngestionMeters().getThrownAway()); // Check published metadata - SegmentDescriptor desc1 = SD(task, "2010/P1D", 0); - SegmentDescriptor desc2 = SD(task, "2011/P1D", 0); + SegmentDescriptor desc1 = sd(task, "2010/P1D", 0); + SegmentDescriptor desc2 = sd(task, "2011/P1D", 0); Assert.assertEquals(ImmutableSet.of(desc1, desc2), publishedDescriptors()); Assert.assertEquals( new KinesisDataSourceMetadata(new SeekableStreamPartitions<>(stream, ImmutableMap.of( @@ -1396,10 +1395,10 @@ public 
class KinesisIndexTaskTest extends EasyMockSupport Assert.assertEquals(4, task.getRunner().getRowIngestionMeters().getUnparseable()); // Check published metadata - SegmentDescriptor desc1 = SD(task, "2010/P1D", 0); - SegmentDescriptor desc2 = SD(task, "2011/P1D", 0); - SegmentDescriptor desc3 = SD(task, "2013/P1D", 0); - SegmentDescriptor desc4 = SD(task, "2049/P1D", 0); + SegmentDescriptor desc1 = sd(task, "2010/P1D", 0); + SegmentDescriptor desc2 = sd(task, "2011/P1D", 0); + SegmentDescriptor desc3 = sd(task, "2013/P1D", 0); + SegmentDescriptor desc4 = sd(task, "2049/P1D", 0); Assert.assertEquals(ImmutableSet.of(desc1, desc2, desc3, desc4), publishedDescriptors()); Assert.assertEquals( new KinesisDataSourceMetadata( @@ -1621,8 +1620,8 @@ public class KinesisIndexTaskTest extends EasyMockSupport Assert.assertEquals(0, task2.getRunner().getRowIngestionMeters().getThrownAway()); // Check published segments & metadata - SegmentDescriptor desc1 = SD(task1, "2010/P1D", 0); - SegmentDescriptor desc2 = SD(task1, "2011/P1D", 0); + SegmentDescriptor desc1 = sd(task1, "2010/P1D", 0); + SegmentDescriptor desc2 = sd(task1, "2011/P1D", 0); Assert.assertEquals(ImmutableSet.of(desc1, desc2), publishedDescriptors()); Assert.assertEquals( new KinesisDataSourceMetadata( @@ -1732,8 +1731,8 @@ public class KinesisIndexTaskTest extends EasyMockSupport Assert.assertEquals(0, task2.getRunner().getRowIngestionMeters().getThrownAway()); // Check published segments & metadata, should all be from the first task - SegmentDescriptor desc1 = SD(task1, "2010/P1D", 0); - SegmentDescriptor desc2 = SD(task1, "2011/P1D", 0); + SegmentDescriptor desc1 = sd(task1, "2010/P1D", 0); + SegmentDescriptor desc2 = sd(task1, "2011/P1D", 0); Assert.assertEquals(ImmutableSet.of(desc1, desc2), publishedDescriptors()); Assert.assertEquals( new KinesisDataSourceMetadata( @@ -1830,8 +1829,8 @@ public class KinesisIndexTaskTest extends EasyMockSupport Assert.assertEquals(TaskState.SUCCESS, future1.get().getStatusCode()); // Check published segments & metadata - SegmentDescriptor desc1 = SD(task1, "2010/P1D", 0); - SegmentDescriptor desc2 = SD(task1, "2011/P1D", 0); + SegmentDescriptor desc1 = sd(task1, "2010/P1D", 0); + SegmentDescriptor desc2 = sd(task1, "2011/P1D", 0); Assert.assertEquals(ImmutableSet.of(desc1, desc2), publishedDescriptors()); Assert.assertNull(metadataStorageCoordinator.getDataSourceMetadata(DATA_SCHEMA.getDataSource())); @@ -1850,8 +1849,8 @@ public class KinesisIndexTaskTest extends EasyMockSupport Assert.assertEquals(0, task2.getRunner().getRowIngestionMeters().getThrownAway()); // Check published segments & metadata - SegmentDescriptor desc3 = SD(task2, "2011/P1D", 1); - SegmentDescriptor desc4 = SD(task2, "2013/P1D", 0); + SegmentDescriptor desc3 = sd(task2, "2011/P1D", 1); + SegmentDescriptor desc4 = sd(task2, "2013/P1D", 0); Assert.assertEquals(ImmutableSet.of(desc1, desc2, desc3, desc4), publishedDescriptors()); Assert.assertNull(metadataStorageCoordinator.getDataSourceMetadata(DATA_SCHEMA.getDataSource())); @@ -1929,9 +1928,9 @@ public class KinesisIndexTaskTest extends EasyMockSupport Assert.assertEquals(0, task.getRunner().getRowIngestionMeters().getThrownAway()); // Check published segments & metadata - SegmentDescriptor desc1 = SD(task, "2010/P1D", 0); - SegmentDescriptor desc2 = SD(task, "2011/P1D", 0); - SegmentDescriptor desc4 = SD(task, "2012/P1D", 0); + SegmentDescriptor desc1 = sd(task, "2010/P1D", 0); + SegmentDescriptor desc2 = sd(task, "2011/P1D", 0); + SegmentDescriptor desc4 = sd(task, 
"2012/P1D", 0); Assert.assertEquals(ImmutableSet.of(desc1, desc2, desc4), publishedDescriptors()); Assert.assertEquals( new KinesisDataSourceMetadata(new SeekableStreamPartitions<>(stream, ImmutableMap.of( @@ -2044,10 +2043,10 @@ public class KinesisIndexTaskTest extends EasyMockSupport Assert.assertEquals(0, task2.getRunner().getRowIngestionMeters().getThrownAway()); // Check published segments & metadata - SegmentDescriptor desc1 = SD(task1, "2010/P1D", 0); - SegmentDescriptor desc2 = SD(task1, "2011/P1D", 0); - SegmentDescriptor desc3 = SD(task2, "2011/P1D", 1); - SegmentDescriptor desc4 = SD(task2, "2012/P1D", 0); + SegmentDescriptor desc1 = sd(task1, "2010/P1D", 0); + SegmentDescriptor desc2 = sd(task1, "2011/P1D", 0); + SegmentDescriptor desc3 = sd(task2, "2011/P1D", 1); + SegmentDescriptor desc4 = sd(task2, "2012/P1D", 0); Assert.assertEquals(ImmutableSet.of(desc1, desc2, desc3, desc4), publishedDescriptors()); Assert.assertEquals( @@ -2197,8 +2196,8 @@ public class KinesisIndexTaskTest extends EasyMockSupport Assert.assertEquals(0, task2.getRunner().getRowIngestionMeters().getThrownAway()); // Check published segments & metadata - SegmentDescriptor desc1 = SD(task1, "2010/P1D", 0); - SegmentDescriptor desc2 = SD(task1, "2011/P1D", 0); + SegmentDescriptor desc1 = sd(task1, "2010/P1D", 0); + SegmentDescriptor desc2 = sd(task1, "2011/P1D", 0); Assert.assertEquals(ImmutableSet.of(desc1, desc2), publishedDescriptors()); Assert.assertEquals( new KinesisDataSourceMetadata( @@ -2318,8 +2317,8 @@ public class KinesisIndexTaskTest extends EasyMockSupport Assert.assertEquals(0, task.getRunner().getRowIngestionMeters().getThrownAway()); // Check published metadata - SegmentDescriptor desc1 = SD(task, "2010/P1D", 0); - SegmentDescriptor desc2 = SD(task, "2011/P1D", 0); + SegmentDescriptor desc1 = sd(task, "2010/P1D", 0); + SegmentDescriptor desc2 = sd(task, "2011/P1D", 0); Assert.assertEquals(ImmutableSet.of(desc1, desc2), publishedDescriptors()); Assert.assertEquals( new KinesisDataSourceMetadata(new SeekableStreamPartitions<>( @@ -2408,8 +2407,8 @@ public class KinesisIndexTaskTest extends EasyMockSupport Assert.assertEquals(0, task.getRunner().getRowIngestionMeters().getThrownAway()); // Check published metadata - SegmentDescriptor desc1 = SD(task, "2010/P1D", 0); - SegmentDescriptor desc2 = SD(task, "2011/P1D", 0); + SegmentDescriptor desc1 = sd(task, "2010/P1D", 0); + SegmentDescriptor desc2 = sd(task, "2011/P1D", 0); Assert.assertEquals(ImmutableSet.of(desc1, desc2), publishedDescriptors()); Assert.assertEquals( new KinesisDataSourceMetadata(new SeekableStreamPartitions<>(stream, ImmutableMap.of( @@ -2765,9 +2764,7 @@ public class KinesisIndexTaskTest extends EasyMockSupport this::makeTimeseriesOnlyConglomerate, Execs.directExecutor(), // queryExecutorService EasyMock.createMock(MonitorScheduler.class), - new SegmentLoaderFactory( - new SegmentLoaderLocalCacheManager(null, segmentLoaderConfig, testUtils.getTestObjectMapper()) - ), + new SegmentLoaderFactory(null, testUtils.getTestObjectMapper()), testUtils.getTestObjectMapper(), testUtils.getTestIndexIO(), MapCache.create(1024), @@ -2880,7 +2877,7 @@ public class KinesisIndexTaskTest extends EasyMockSupport return results.isEmpty() ? 
0L : DimensionHandlerUtils.nullToZero(results.get(0).getValue().getLongMetric("rows")); } - private static List JB( + private static List jb( String timestamp, String dim1, String dim2, @@ -2906,7 +2903,7 @@ public class KinesisIndexTaskTest extends EasyMockSupport } } - private SegmentDescriptor SD(final Task task, final String intervalString, final int partitionNum) + private SegmentDescriptor sd(final Task task, final String intervalString, final int partitionNum) { final Interval interval = Intervals.of(intervalString); return new SegmentDescriptor(interval, getLock(task, interval).getVersion(), partitionNum); diff --git a/extensions-core/kinesis-indexing-service/src/test/java/org/apache/druid/indexing/kinesis/KinesisRecordSupplierTest.java b/extensions-core/kinesis-indexing-service/src/test/java/org/apache/druid/indexing/kinesis/KinesisRecordSupplierTest.java index 166678cb7eb..dd999208077 100644 --- a/extensions-core/kinesis-indexing-service/src/test/java/org/apache/druid/indexing/kinesis/KinesisRecordSupplierTest.java +++ b/extensions-core/kinesis-indexing-service/src/test/java/org/apache/druid/indexing/kinesis/KinesisRecordSupplierTest.java @@ -76,20 +76,20 @@ public class KinesisRecordSupplierTest extends EasyMockSupport private static Shard shard1; private static KinesisRecordSupplier recordSupplier; private static List shard1Records = ImmutableList.of( - new Record().withData(JB("2011", "d", "y", "10", "20.0", "1.0")).withSequenceNumber("0"), - new Record().withData(JB("2011", "e", "y", "10", "20.0", "1.0")).withSequenceNumber("1"), - new Record().withData(JB("246140482-04-24T15:36:27.903Z", "x", "z", "10", "20.0", "1.0")).withSequenceNumber("2"), + new Record().withData(jb("2011", "d", "y", "10", "20.0", "1.0")).withSequenceNumber("0"), + new Record().withData(jb("2011", "e", "y", "10", "20.0", "1.0")).withSequenceNumber("1"), + new Record().withData(jb("246140482-04-24T15:36:27.903Z", "x", "z", "10", "20.0", "1.0")).withSequenceNumber("2"), new Record().withData(ByteBuffer.wrap(StringUtils.toUtf8("unparseable"))).withSequenceNumber("3"), new Record().withData(ByteBuffer.wrap(StringUtils.toUtf8("unparseable2"))).withSequenceNumber("4"), new Record().withData(ByteBuffer.wrap(StringUtils.toUtf8("{}"))).withSequenceNumber("5"), - new Record().withData(JB("2013", "f", "y", "10", "20.0", "1.0")).withSequenceNumber("6"), - new Record().withData(JB("2049", "f", "y", "notanumber", "20.0", "1.0")).withSequenceNumber("7"), - new Record().withData(JB("2012", "g", "y", "10", "20.0", "1.0")).withSequenceNumber("8"), - new Record().withData(JB("2011", "h", "y", "10", "20.0", "1.0")).withSequenceNumber("9") + new Record().withData(jb("2013", "f", "y", "10", "20.0", "1.0")).withSequenceNumber("6"), + new Record().withData(jb("2049", "f", "y", "notanumber", "20.0", "1.0")).withSequenceNumber("7"), + new Record().withData(jb("2012", "g", "y", "10", "20.0", "1.0")).withSequenceNumber("8"), + new Record().withData(jb("2011", "h", "y", "10", "20.0", "1.0")).withSequenceNumber("9") ); private static List shard0Records = ImmutableList.of( - new Record().withData(JB("2008", "a", "y", "10", "20.0", "1.0")).withSequenceNumber("0"), - new Record().withData(JB("2009", "b", "y", "10", "20.0", "1.0")).withSequenceNumber("1") + new Record().withData(jb("2008", "a", "y", "10", "20.0", "1.0")).withSequenceNumber("0"), + new Record().withData(jb("2009", "b", "y", "10", "20.0", "1.0")).withSequenceNumber("1") ); private static List allRecords = ImmutableList.builder() .addAll(shard0Records.stream() @@ -120,7 
+120,7 @@ public class KinesisRecordSupplierTest extends EasyMockSupport .toList())) .build(); - private static ByteBuffer JB(String timestamp, String dim1, String dim2, String dimLong, String dimFloat, String met1) + private static ByteBuffer jb(String timestamp, String dim1, String dim2, String dimLong, String dimFloat, String met1) { try { return ByteBuffer.wrap(new ObjectMapper().writeValueAsBytes( diff --git a/extensions-core/kinesis-indexing-service/src/test/java/org/apache/druid/indexing/kinesis/supervisor/KinesisSupervisorTest.java b/extensions-core/kinesis-indexing-service/src/test/java/org/apache/druid/indexing/kinesis/supervisor/KinesisSupervisorTest.java index 750b9247d79..2ceadb527cd 100644 --- a/extensions-core/kinesis-indexing-service/src/test/java/org/apache/druid/indexing/kinesis/supervisor/KinesisSupervisorTest.java +++ b/extensions-core/kinesis-indexing-service/src/test/java/org/apache/druid/indexing/kinesis/supervisor/KinesisSupervisorTest.java @@ -3560,7 +3560,7 @@ public class KinesisSupervisorTest extends EasyMockSupport ); } - private static List JB( + private static List jb( String timestamp, String dim1, String dim2, diff --git a/extensions-core/s3-extensions/src/main/java/org/apache/druid/storage/s3/S3StorageDruidModule.java b/extensions-core/s3-extensions/src/main/java/org/apache/druid/storage/s3/S3StorageDruidModule.java index 2bac01896b8..61d145d591e 100644 --- a/extensions-core/s3-extensions/src/main/java/org/apache/druid/storage/s3/S3StorageDruidModule.java +++ b/extensions-core/s3-extensions/src/main/java/org/apache/druid/storage/s3/S3StorageDruidModule.java @@ -43,6 +43,7 @@ import org.apache.druid.guice.JsonConfigProvider; import org.apache.druid.guice.LazySingleton; import org.apache.druid.initialization.DruidModule; import org.apache.druid.java.util.common.IAE; +import org.apache.druid.java.util.common.URIs; import org.apache.druid.java.util.common.logger.Logger; import javax.annotation.Nullable; @@ -184,7 +185,8 @@ public class S3StorageDruidModule implements DruidModule final Protocol protocolFromClientConfig = parseProtocol(clientConfig.getProtocol()); final String endpointUrl = endpointConfig.getUrl(); if (StringUtils.isNotEmpty(endpointUrl)) { - final URI uri = URI.create(endpointUrl); + //noinspection ConstantConditions + final URI uri = URIs.parse(endpointUrl, protocolFromClientConfig.toString()); final Protocol protocol = parseProtocol(uri.getScheme()); if (protocol != null && (protocol != protocolFromClientConfig)) { log.warn("[%s] protocol will be used for endpoint [%s]", protocol, endpointUrl); diff --git a/indexing-service/src/main/java/org/apache/druid/indexing/common/SegmentLoaderFactory.java b/indexing-service/src/main/java/org/apache/druid/indexing/common/SegmentLoaderFactory.java index b15d3dfc4bd..83fa9dbb06b 100644 --- a/indexing-service/src/main/java/org/apache/druid/indexing/common/SegmentLoaderFactory.java +++ b/indexing-service/src/main/java/org/apache/druid/indexing/common/SegmentLoaderFactory.java @@ -19,7 +19,10 @@ package org.apache.druid.indexing.common; +import com.fasterxml.jackson.databind.ObjectMapper; import com.google.inject.Inject; +import org.apache.druid.guice.annotations.Json; +import org.apache.druid.segment.IndexIO; import org.apache.druid.segment.loading.SegmentLoader; import org.apache.druid.segment.loading.SegmentLoaderConfig; import org.apache.druid.segment.loading.SegmentLoaderLocalCacheManager; @@ -29,23 +32,30 @@ import java.io.File; import java.util.Collections; /** + * */ public class SegmentLoaderFactory 
{ - private final SegmentLoaderLocalCacheManager loader; + private final IndexIO indexIO; + private final ObjectMapper jsonMapper; @Inject public SegmentLoaderFactory( - SegmentLoaderLocalCacheManager loader + IndexIO indexIO, + @Json ObjectMapper mapper ) { - this.loader = loader; + this.indexIO = indexIO; + this.jsonMapper = mapper; } public SegmentLoader manufacturate(File storageDir) { - return loader.withConfig( - new SegmentLoaderConfig().withLocations(Collections.singletonList(new StorageLocationConfig().setPath(storageDir))) + return new SegmentLoaderLocalCacheManager( + indexIO, + new SegmentLoaderConfig().withLocations( + Collections.singletonList(new StorageLocationConfig().setPath(storageDir))), + jsonMapper ); } } diff --git a/indexing-service/src/main/java/org/apache/druid/indexing/common/task/CompactionTask.java b/indexing-service/src/main/java/org/apache/druid/indexing/common/task/CompactionTask.java index 740d78d3639..c897de3fc0f 100644 --- a/indexing-service/src/main/java/org/apache/druid/indexing/common/task/CompactionTask.java +++ b/indexing-service/src/main/java/org/apache/druid/indexing/common/task/CompactionTask.java @@ -29,6 +29,7 @@ import com.google.common.base.Preconditions; import com.google.common.collect.BiMap; import com.google.common.collect.HashBiMap; import com.google.common.collect.Lists; +import org.apache.druid.client.coordinator.CoordinatorClient; import org.apache.druid.data.input.impl.DimensionSchema; import org.apache.druid.data.input.impl.DimensionSchema.MultiValueHandling; import org.apache.druid.data.input.impl.DimensionsSpec; @@ -40,6 +41,8 @@ import org.apache.druid.data.input.impl.NoopInputRowParser; import org.apache.druid.data.input.impl.StringDimensionSchema; import org.apache.druid.data.input.impl.TimeAndDimsParseSpec; import org.apache.druid.indexer.TaskStatus; +import org.apache.druid.indexing.common.RetryPolicyFactory; +import org.apache.druid.indexing.common.SegmentLoaderFactory; import org.apache.druid.indexing.common.TaskToolbox; import org.apache.druid.indexing.common.actions.SegmentListUsedAction; import org.apache.druid.indexing.common.actions.TaskActionClient; @@ -132,6 +135,15 @@ public class CompactionTask extends AbstractTask @JsonIgnore private final RowIngestionMetersFactory rowIngestionMetersFactory; + @JsonIgnore + private final CoordinatorClient coordinatorClient; + + @JsonIgnore + private final SegmentLoaderFactory segmentLoaderFactory; + + @JsonIgnore + private final RetryPolicyFactory retryPolicyFactory; + @JsonIgnore private List indexTaskSpecs; @@ -153,7 +165,10 @@ public class CompactionTask extends AbstractTask @JacksonInject ObjectMapper jsonMapper, @JacksonInject AuthorizerMapper authorizerMapper, @JacksonInject ChatHandlerProvider chatHandlerProvider, - @JacksonInject RowIngestionMetersFactory rowIngestionMetersFactory + @JacksonInject RowIngestionMetersFactory rowIngestionMetersFactory, + @JacksonInject CoordinatorClient coordinatorClient, + @JacksonInject SegmentLoaderFactory segmentLoaderFactory, + @JacksonInject RetryPolicyFactory retryPolicyFactory ) { super(getOrMakeId(id, TYPE, dataSource), null, taskResource, dataSource, context); @@ -186,6 +201,9 @@ public class CompactionTask extends AbstractTask this.authorizerMapper = authorizerMapper; this.chatHandlerProvider = chatHandlerProvider; this.rowIngestionMetersFactory = rowIngestionMetersFactory; + this.coordinatorClient = coordinatorClient; + this.segmentLoaderFactory = segmentLoaderFactory; + this.retryPolicyFactory = retryPolicyFactory; } @JsonProperty 
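The SegmentLoaderFactory rework above inverts the old design: rather than re-configuring a single injected SegmentLoaderLocalCacheManager, the factory now assembles a fresh loader per target directory from an IndexIO and a @Json ObjectMapper, and CompactionTask (this file) receives the factory itself via @JacksonInject. A minimal sketch of the resulting calling convention, assuming an injected factory; tmpDir and segment are illustrative placeholders, not from this patch:

import java.io.File;
import org.apache.druid.indexing.common.SegmentLoaderFactory;
import org.apache.druid.segment.loading.SegmentLoader;
import org.apache.druid.segment.loading.SegmentLoadingException;
import org.apache.druid.timeline.DataSegment;

class SegmentFetchSketch
{
  // Sketch only: each manufacturate() call builds a new SegmentLoaderLocalCacheManager
  // rooted at tmpDir, so tasks no longer share or mutate a common loader config.
  static File fetch(SegmentLoaderFactory factory, File tmpDir, DataSegment segment)
      throws SegmentLoadingException
  {
    final SegmentLoader loader = factory.manufacturate(tmpDir);
    return loader.getSegmentFiles(segment); // pulls the segment into the local cache
  }
}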
@@ -278,20 +296,23 @@ public class CompactionTask extends AbstractTask metricsSpec, keepSegmentGranularity, segmentGranularity, - jsonMapper + jsonMapper, + coordinatorClient, + segmentLoaderFactory, + retryPolicyFactory ).stream() - .map(spec -> new IndexTask( - getId(), - getGroupId(), - getTaskResource(), - getDataSource(), - spec, - getContext(), - authorizerMapper, - chatHandlerProvider, - rowIngestionMetersFactory - )) - .collect(Collectors.toList()); + .map(spec -> new IndexTask( + getId(), + getGroupId(), + getTaskResource(), + getDataSource(), + spec, + getContext(), + authorizerMapper, + chatHandlerProvider, + rowIngestionMetersFactory + )) + .collect(Collectors.toList()); } if (indexTaskSpecs.isEmpty()) { @@ -338,7 +359,10 @@ public class CompactionTask extends AbstractTask @Nullable final AggregatorFactory[] metricsSpec, @Nullable final Boolean keepSegmentGranularity, @Nullable final Granularity segmentGranularity, - final ObjectMapper jsonMapper + final ObjectMapper jsonMapper, + final CoordinatorClient coordinatorClient, + final SegmentLoaderFactory segmentLoaderFactory, + final RetryPolicyFactory retryPolicyFactory ) throws IOException, SegmentLoadingException { Pair, List>> pair = prepareSegments( @@ -379,7 +403,14 @@ public class CompactionTask extends AbstractTask return Collections.singletonList( new IndexIngestionSpec( dataSchema, - createIoConfig(toolbox, dataSchema, segmentProvider.interval), + createIoConfig( + toolbox, + dataSchema, + segmentProvider.interval, + coordinatorClient, + segmentLoaderFactory, + retryPolicyFactory + ), compactionTuningConfig ) ); @@ -411,7 +442,14 @@ public class CompactionTask extends AbstractTask specs.add( new IndexIngestionSpec( dataSchema, - createIoConfig(toolbox, dataSchema, interval), + createIoConfig( + toolbox, + dataSchema, + interval, + coordinatorClient, + segmentLoaderFactory, + retryPolicyFactory + ), compactionTuningConfig ) ); @@ -438,7 +476,14 @@ public class CompactionTask extends AbstractTask return Collections.singletonList( new IndexIngestionSpec( dataSchema, - createIoConfig(toolbox, dataSchema, segmentProvider.interval), + createIoConfig( + toolbox, + dataSchema, + segmentProvider.interval, + coordinatorClient, + segmentLoaderFactory, + retryPolicyFactory + ), compactionTuningConfig ) ); @@ -446,7 +491,14 @@ public class CompactionTask extends AbstractTask } } - private static IndexIOConfig createIoConfig(TaskToolbox toolbox, DataSchema dataSchema, Interval interval) + private static IndexIOConfig createIoConfig( + TaskToolbox toolbox, + DataSchema dataSchema, + Interval interval, + CoordinatorClient coordinatorClient, + SegmentLoaderFactory segmentLoaderFactory, + RetryPolicyFactory retryPolicyFactory + ) { return new IndexIOConfig( new IngestSegmentFirehoseFactory( @@ -456,7 +508,10 @@ public class CompactionTask extends AbstractTask // set dimensions and metrics names to make sure that the generated dataSchema is used for the firehose dataSchema.getParser().getParseSpec().getDimensionsSpec().getDimensionNames(), Arrays.stream(dataSchema.getAggregators()).map(AggregatorFactory::getName).collect(Collectors.toList()), - toolbox.getIndexIO() + toolbox.getIndexIO(), + coordinatorClient, + segmentLoaderFactory, + retryPolicyFactory ), false ); @@ -811,7 +866,7 @@ public class CompactionTask extends AbstractTask * targetCompactionSizeBytes cannot be used with {@link IndexTuningConfig#maxRowsPerSegment}, * {@link IndexTuningConfig#maxTotalRows}, or {@link IndexTuningConfig#numShards} together. 
* {@link #hasPartitionConfig} checks one of those configs is set. - * + *
* This throws an {@link IllegalArgumentException} if targetCompactionSizeBytes is set and hasPartitionConfig * returns true. If targetCompactionSizeBytes is not set, this returns null or * {@link DataSourceCompactionConfig#DEFAULT_TARGET_COMPACTION_SIZE_BYTES} according to the result of @@ -860,6 +915,9 @@ public class CompactionTask extends AbstractTask private final AuthorizerMapper authorizerMapper; private final ChatHandlerProvider chatHandlerProvider; private final RowIngestionMetersFactory rowIngestionMetersFactory; + private final CoordinatorClient coordinatorClient; + private final SegmentLoaderFactory segmentLoaderFactory; + private final RetryPolicyFactory retryPolicyFactory; @Nullable private Interval interval; @@ -885,7 +943,10 @@ public class CompactionTask extends AbstractTask ObjectMapper jsonMapper, AuthorizerMapper authorizerMapper, ChatHandlerProvider chatHandlerProvider, - RowIngestionMetersFactory rowIngestionMetersFactory + RowIngestionMetersFactory rowIngestionMetersFactory, + CoordinatorClient coordinatorClient, + SegmentLoaderFactory segmentLoaderFactory, + RetryPolicyFactory retryPolicyFactory ) { this.dataSource = dataSource; @@ -893,6 +954,9 @@ public class CompactionTask extends AbstractTask this.authorizerMapper = authorizerMapper; this.chatHandlerProvider = chatHandlerProvider; this.rowIngestionMetersFactory = rowIngestionMetersFactory; + this.coordinatorClient = coordinatorClient; + this.segmentLoaderFactory = segmentLoaderFactory; + this.retryPolicyFactory = retryPolicyFactory; } public Builder interval(Interval interval) @@ -968,7 +1032,10 @@ public class CompactionTask extends AbstractTask jsonMapper, authorizerMapper, chatHandlerProvider, - rowIngestionMetersFactory + rowIngestionMetersFactory, + coordinatorClient, + segmentLoaderFactory, + retryPolicyFactory ); } } diff --git a/indexing-service/src/main/java/org/apache/druid/indexing/common/task/IndexTask.java b/indexing-service/src/main/java/org/apache/druid/indexing/common/task/IndexTask.java index 6d5a0d8e56a..d0e083a2b6e 100644 --- a/indexing-service/src/main/java/org/apache/druid/indexing/common/task/IndexTask.java +++ b/indexing-service/src/main/java/org/apache/druid/indexing/common/task/IndexTask.java @@ -54,7 +54,6 @@ import org.apache.druid.indexing.common.actions.TaskActionClient; import org.apache.druid.indexing.common.stats.RowIngestionMeters; import org.apache.druid.indexing.common.stats.RowIngestionMetersFactory; import org.apache.druid.indexing.common.stats.TaskRealtimeMetricsMonitor; -import org.apache.druid.indexing.firehose.IngestSegmentFirehoseFactory; import org.apache.druid.java.util.common.ISE; import org.apache.druid.java.util.common.Intervals; import org.apache.druid.java.util.common.JodaUtils; @@ -84,7 +83,6 @@ import org.apache.druid.segment.realtime.appenderator.SegmentsAndMetadata; import org.apache.druid.segment.realtime.appenderator.TransactionalSegmentPublisher; import org.apache.druid.segment.realtime.firehose.ChatHandler; import org.apache.druid.segment.realtime.firehose.ChatHandlerProvider; -import org.apache.druid.segment.realtime.firehose.CombiningFirehoseFactory; import org.apache.druid.segment.writeout.SegmentWriteOutMediumFactory; import org.apache.druid.server.security.Action; import org.apache.druid.server.security.AuthorizerMapper; @@ -419,8 +417,6 @@ public class IndexTask extends AbstractTask implements ChatHandler final FirehoseFactory firehoseFactory = ingestionSchema.getIOConfig().getFirehoseFactory(); - setFirehoseFactoryToolbox(firehoseFactory, toolbox); - 
final File firehoseTempDir = toolbox.getFirehoseTemporaryDir(); // Firehose temporary directory is automatically removed when this IndexTask completes. FileUtils.forceMkdir(firehoseTempDir); @@ -489,25 +485,6 @@ public class IndexTask extends AbstractTask implements ChatHandler } } - // pass toolbox to any IngestSegmentFirehoseFactory - private void setFirehoseFactoryToolbox(FirehoseFactory firehoseFactory, TaskToolbox toolbox) - { - if (firehoseFactory instanceof IngestSegmentFirehoseFactory) { - ((IngestSegmentFirehoseFactory) firehoseFactory).setTaskToolbox(toolbox); - return; - } - - if (firehoseFactory instanceof CombiningFirehoseFactory) { - for (FirehoseFactory delegateFactory : ((CombiningFirehoseFactory) firehoseFactory).getDelegateFactoryList()) { - if (delegateFactory instanceof IngestSegmentFirehoseFactory) { - ((IngestSegmentFirehoseFactory) delegateFactory).setTaskToolbox(toolbox); - } else if (delegateFactory instanceof CombiningFirehoseFactory) { - setFirehoseFactoryToolbox(delegateFactory, toolbox); - } - } - } - } - private Map getTaskCompletionReports() { return TaskReport.buildTaskReports( diff --git a/indexing-service/src/main/java/org/apache/druid/indexing/common/task/batch/parallel/ParallelIndexSubTask.java b/indexing-service/src/main/java/org/apache/druid/indexing/common/task/batch/parallel/ParallelIndexSubTask.java index 8004243bb3a..435de05892f 100644 --- a/indexing-service/src/main/java/org/apache/druid/indexing/common/task/batch/parallel/ParallelIndexSubTask.java +++ b/indexing-service/src/main/java/org/apache/druid/indexing/common/task/batch/parallel/ParallelIndexSubTask.java @@ -44,7 +44,6 @@ import org.apache.druid.indexing.common.task.IndexTask; import org.apache.druid.indexing.common.task.IndexTaskClientFactory; import org.apache.druid.indexing.common.task.TaskResource; import org.apache.druid.indexing.common.task.Tasks; -import org.apache.druid.indexing.firehose.IngestSegmentFirehoseFactory; import org.apache.druid.java.util.common.ISE; import org.apache.druid.java.util.common.Intervals; import org.apache.druid.java.util.common.StringUtils; @@ -185,11 +184,6 @@ public class ParallelIndexSubTask extends AbstractTask { final FirehoseFactory firehoseFactory = ingestionSchema.getIOConfig().getFirehoseFactory(); - if (firehoseFactory instanceof IngestSegmentFirehoseFactory) { - // pass toolbox to Firehose - ((IngestSegmentFirehoseFactory) firehoseFactory).setTaskToolbox(toolbox); - } - final File firehoseTempDir = toolbox.getFirehoseTemporaryDir(); // Firehose temporary directory is automatically removed when this IndexTask completes. 
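Both deletions above retire the same workaround: IndexTask and ParallelIndexSubTask no longer inspect the firehose factory (or recurse through CombiningFirehoseFactory delegates) to hand it a TaskToolbox after construction. As the next file shows, IngestSegmentFirehoseFactory instead declares its dependencies in its @JsonCreator constructor, so Jackson supplies them at deserialization time. A compressed sketch of that injection pattern, using the names this patch introduces (fields and unrelated parameters abridged):

import com.fasterxml.jackson.annotation.JacksonInject;
import com.fasterxml.jackson.annotation.JsonCreator;
import org.apache.druid.client.coordinator.CoordinatorClient;
import org.apache.druid.indexing.common.RetryPolicyFactory;
import org.apache.druid.indexing.common.SegmentLoaderFactory;

class InjectionSketch
{
  private final CoordinatorClient coordinatorClient;        // replaces the SegmentListUsedAction lookup
  private final SegmentLoaderFactory segmentLoaderFactory;  // replaces toolbox.fetchSegments()
  private final RetryPolicyFactory retryPolicyFactory;      // preserves TaskActionClient-style retries

  @JsonCreator
  InjectionSketch(
      @JacksonInject CoordinatorClient coordinatorClient,
      @JacksonInject SegmentLoaderFactory segmentLoaderFactory,
      @JacksonInject RetryPolicyFactory retryPolicyFactory
  )
  {
    this.coordinatorClient = coordinatorClient;
    this.segmentLoaderFactory = segmentLoaderFactory;
    this.retryPolicyFactory = retryPolicyFactory;
  }
}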
FileUtils.forceMkdir(firehoseTempDir); diff --git a/indexing-service/src/main/java/org/apache/druid/indexing/firehose/IngestSegmentFirehoseFactory.java b/indexing-service/src/main/java/org/apache/druid/indexing/firehose/IngestSegmentFirehoseFactory.java index 8087582ecb5..bae2946bbdc 100644 --- a/indexing-service/src/main/java/org/apache/druid/indexing/firehose/IngestSegmentFirehoseFactory.java +++ b/indexing-service/src/main/java/org/apache/druid/indexing/firehose/IngestSegmentFirehoseFactory.java @@ -30,16 +30,20 @@ import com.google.common.collect.BiMap; import com.google.common.collect.HashBiMap; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; +import com.google.common.collect.Maps; +import org.apache.druid.client.coordinator.CoordinatorClient; import org.apache.druid.data.input.Firehose; import org.apache.druid.data.input.FirehoseFactory; import org.apache.druid.data.input.impl.InputRowParser; -import org.apache.druid.indexing.common.TaskToolbox; -import org.apache.druid.indexing.common.actions.SegmentListUsedAction; +import org.apache.druid.indexing.common.RetryPolicy; +import org.apache.druid.indexing.common.RetryPolicyFactory; +import org.apache.druid.indexing.common.SegmentLoaderFactory; import org.apache.druid.java.util.common.parsers.ParseException; import org.apache.druid.java.util.emitter.EmittingLogger; import org.apache.druid.query.filter.DimFilter; import org.apache.druid.segment.IndexIO; import org.apache.druid.segment.QueryableIndexStorageAdapter; +import org.apache.druid.segment.loading.SegmentLoader; import org.apache.druid.segment.loading.SegmentLoadingException; import org.apache.druid.segment.realtime.firehose.IngestSegmentFirehose; import org.apache.druid.segment.realtime.firehose.WindowedStorageAdapter; @@ -48,14 +52,17 @@ import org.apache.druid.timeline.DataSegment; import org.apache.druid.timeline.TimelineObjectHolder; import org.apache.druid.timeline.VersionedIntervalTimeline; import org.apache.druid.timeline.partition.PartitionChunk; +import org.joda.time.Duration; import org.joda.time.Interval; import javax.annotation.Nullable; import java.io.File; import java.io.IOException; +import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Set; +import java.util.concurrent.ThreadLocalRandom; import java.util.stream.Collectors; import java.util.stream.IntStream; @@ -68,7 +75,9 @@ public class IngestSegmentFirehoseFactory implements FirehoseFactory dimensions; private final List metrics; private final IndexIO indexIO; - private TaskToolbox taskToolbox; + private final CoordinatorClient coordinatorClient; + private final SegmentLoaderFactory segmentLoaderFactory; + private final RetryPolicyFactory retryPolicyFactory; @JsonCreator public IngestSegmentFirehoseFactory( @@ -77,7 +86,10 @@ public class IngestSegmentFirehoseFactory implements FirehoseFactory dimensions, @JsonProperty("metrics") List metrics, - @JacksonInject IndexIO indexIO + @JacksonInject IndexIO indexIO, + @JacksonInject CoordinatorClient coordinatorClient, + @JacksonInject SegmentLoaderFactory segmentLoaderFactory, + @JacksonInject RetryPolicyFactory retryPolicyFactory ) { Preconditions.checkNotNull(dataSource, "dataSource"); @@ -88,6 +100,9 @@ public class IngestSegmentFirehoseFactory implements FirehoseFactory usedSegments = taskToolbox - .getTaskActionClient() - .submit(new SegmentListUsedAction(dataSource, interval, null)); - final Map segmentFileMap = taskToolbox.fetchSegments(usedSegments); + // This call used to use the 
TaskActionClient, so for compatibility we use the same retry configuration + // as TaskActionClient. + final RetryPolicy retryPolicy = retryPolicyFactory.makeRetryPolicy(); + List usedSegments; + while (true) { + try { + usedSegments = + coordinatorClient.getDatabaseSegmentDataSourceSegments(dataSource, Collections.singletonList(interval)); + break; + } + catch (Throwable e) { + log.warn(e, "Exception getting database segments"); + final Duration delay = retryPolicy.getAndIncrementRetryDelay(); + if (delay == null) { + throw e; + } else { + final long sleepTime = jitter(delay.getMillis()); + log.info("Will try again in [%s].", new Duration(sleepTime).toString()); + try { + Thread.sleep(sleepTime); + } + catch (InterruptedException e2) { + throw new RuntimeException(e2); + } + } + } + } + + final SegmentLoader segmentLoader = segmentLoaderFactory.manufacturate(temporaryDirectory); + Map segmentFileMap = Maps.newLinkedHashMap(); + for (DataSegment segment : usedSegments) { + segmentFileMap.put(segment, segmentLoader.getSegmentFiles(segment)); + } + final List> timeLineSegments = VersionedIntervalTimeline .forSegments(usedSegments) .lookup(interval); @@ -201,11 +239,18 @@ public class IngestSegmentFirehoseFactory implements FirehoseFactory getUniqueDimensions( List> timelineSegments, @@ -260,7 +305,7 @@ public class IngestSegmentFirehoseFactory implements FirehoseFactory orderedMetrics = uniqueMetrics.inverse(); return IntStream.range(0, orderedMetrics.size()) - .mapToObj(orderedMetrics::get) - .collect(Collectors.toList()); + .mapToObj(orderedMetrics::get) + .collect(Collectors.toList()); } } diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/common/TaskToolboxTest.java b/indexing-service/src/test/java/org/apache/druid/indexing/common/TaskToolboxTest.java index d9acfd6bd83..0966d1b84c9 100644 --- a/indexing-service/src/test/java/org/apache/druid/indexing/common/TaskToolboxTest.java +++ b/indexing-service/src/test/java/org/apache/druid/indexing/common/TaskToolboxTest.java @@ -76,6 +76,7 @@ public class TaskToolboxTest private MonitorScheduler mockMonitorScheduler = EasyMock.createMock(MonitorScheduler.class); private ExecutorService mockQueryExecutorService = EasyMock.createMock(ExecutorService.class); private ObjectMapper ObjectMapper = new ObjectMapper(); + private SegmentLoaderFactory mockSegmentLoaderFactory = EasyMock.createMock(SegmentLoaderFactory.class); private SegmentLoaderLocalCacheManager mockSegmentLoaderLocalCacheManager = EasyMock.createMock(SegmentLoaderLocalCacheManager.class); private Task task = EasyMock.createMock(Task.class); private IndexMergerV9 mockIndexMergerV9 = EasyMock.createMock(IndexMergerV9.class); @@ -107,7 +108,7 @@ public class TaskToolboxTest () -> mockQueryRunnerFactoryConglomerate, mockQueryExecutorService, mockMonitorScheduler, - new SegmentLoaderFactory(mockSegmentLoaderLocalCacheManager), + mockSegmentLoaderFactory, ObjectMapper, mockIndexIO, mockCache, @@ -162,13 +163,13 @@ public class TaskToolboxTest public void testFetchSegments() throws SegmentLoadingException, IOException { File expectedFile = temporaryFolder.newFile(); + EasyMock + .expect(mockSegmentLoaderFactory.manufacturate(EasyMock.anyObject())) + .andReturn(mockSegmentLoaderLocalCacheManager).anyTimes(); EasyMock .expect(mockSegmentLoaderLocalCacheManager.getSegmentFiles(EasyMock.anyObject())) .andReturn(expectedFile).anyTimes(); - EasyMock - .expect(mockSegmentLoaderLocalCacheManager.withConfig(EasyMock.anyObject())) - 
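Returning to the retry loop added in IngestSegmentFirehoseFactory.connect() above: it mirrors the TaskActionClient's behavior by asking the RetryPolicy for the next delay, rethrowing once the policy returns null, and sleeping a jittered interval between attempts. The body of jitter(...) falls outside this hunk; the sketch below is a plausible reading consistent with the ThreadLocalRandom import the patch adds, and the Gaussian scaling is an assumption rather than something this diff confirms:

import java.util.concurrent.ThreadLocalRandom;

// Assumed sketch: spread retry delays around the nominal value so that many
// tasks retrying at once do not hit the Coordinator in lockstep.
final class JitterSketch
{
  static long jitter(long input)
  {
    final double noise = ThreadLocalRandom.current().nextGaussian() * input / 4.0;
    final long retval = input + (long) noise;
    return retval < 0 ? 0 : retval; // never sleep a negative duration
  }
}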
.andReturn(mockSegmentLoaderLocalCacheManager).anyTimes(); - EasyMock.replay(mockSegmentLoaderLocalCacheManager); + EasyMock.replay(mockSegmentLoaderFactory, mockSegmentLoaderLocalCacheManager); DataSegment dataSegment = DataSegment.builder().dataSource("source").interval(Intervals.of("2012-01-01/P1D")).version("1").size(1).build(); List segments = ImmutableList.of ( diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/common/task/AppenderatorDriverRealtimeIndexTaskTest.java b/indexing-service/src/test/java/org/apache/druid/indexing/common/task/AppenderatorDriverRealtimeIndexTaskTest.java index b57397ab8cd..b809e7a5ee8 100644 --- a/indexing-service/src/test/java/org/apache/druid/indexing/common/task/AppenderatorDriverRealtimeIndexTaskTest.java +++ b/indexing-service/src/test/java/org/apache/druid/indexing/common/task/AppenderatorDriverRealtimeIndexTaskTest.java @@ -119,7 +119,6 @@ import org.apache.druid.segment.indexing.DataSchema; import org.apache.druid.segment.indexing.RealtimeIOConfig; import org.apache.druid.segment.indexing.granularity.UniformGranularitySpec; import org.apache.druid.segment.loading.SegmentLoaderConfig; -import org.apache.druid.segment.loading.SegmentLoaderLocalCacheManager; import org.apache.druid.segment.loading.StorageLocationConfig; import org.apache.druid.segment.realtime.plumber.SegmentHandoffNotifier; import org.apache.druid.segment.realtime.plumber.SegmentHandoffNotifierFactory; @@ -1607,9 +1606,7 @@ public class AppenderatorDriverRealtimeIndexTaskTest () -> conglomerate, Execs.directExecutor(), // queryExecutorService EasyMock.createMock(MonitorScheduler.class), - new SegmentLoaderFactory( - new SegmentLoaderLocalCacheManager(null, segmentLoaderConfig, testUtils.getTestObjectMapper()) - ), + new SegmentLoaderFactory(null, testUtils.getTestObjectMapper()), testUtils.getTestObjectMapper(), testUtils.getTestIndexIO(), MapCache.create(1024), diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/common/task/CompactionTaskRunTest.java b/indexing-service/src/test/java/org/apache/druid/indexing/common/task/CompactionTaskRunTest.java index bbf28708ff8..6ad0ec4b146 100644 --- a/indexing-service/src/test/java/org/apache/druid/indexing/common/task/CompactionTaskRunTest.java +++ b/indexing-service/src/test/java/org/apache/druid/indexing/common/task/CompactionTaskRunTest.java @@ -23,11 +23,15 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.jsontype.NamedType; import com.google.common.collect.ImmutableList; import com.google.common.io.Files; +import org.apache.druid.client.coordinator.CoordinatorClient; import org.apache.druid.data.input.impl.CSVParseSpec; import org.apache.druid.data.input.impl.DimensionsSpec; import org.apache.druid.data.input.impl.ParseSpec; import org.apache.druid.data.input.impl.TimestampSpec; import org.apache.druid.indexer.TaskStatus; +import org.apache.druid.indexing.common.RetryPolicyConfig; +import org.apache.druid.indexing.common.RetryPolicyFactory; +import org.apache.druid.indexing.common.SegmentLoaderFactory; import org.apache.druid.indexing.common.TaskToolbox; import org.apache.druid.indexing.common.TestUtils; import org.apache.druid.indexing.common.actions.LocalTaskActionClient; @@ -52,6 +56,7 @@ import org.apache.druid.segment.loading.StorageLocationConfig; import org.apache.druid.server.security.AuthTestUtils; import org.apache.druid.timeline.DataSegment; import org.apache.druid.timeline.partition.NumberedShardSpec; +import org.joda.time.Interval; import 
org.junit.After; import org.junit.Assert; import org.junit.Before; @@ -96,12 +101,24 @@ public class CompactionTaskRunTest extends IngestionTestBase ); private RowIngestionMetersFactory rowIngestionMetersFactory; + private CoordinatorClient coordinatorClient; + private SegmentLoaderFactory segmentLoaderFactory; private ExecutorService exec; + private static RetryPolicyFactory retryPolicyFactory = new RetryPolicyFactory(new RetryPolicyConfig()); public CompactionTaskRunTest() { TestUtils testUtils = new TestUtils(); rowIngestionMetersFactory = testUtils.getRowIngestionMetersFactory(); + coordinatorClient = new CoordinatorClient(null, null) + { + @Override + public List getDatabaseSegmentDataSourceSegments(String dataSource, List intervals) + { + return getStorageCoordinator().getUsedSegmentsForIntervals(dataSource, intervals); + } + }; + segmentLoaderFactory = new SegmentLoaderFactory(getIndexIO(), getObjectMapper()); } @Before @@ -126,7 +143,10 @@ public class CompactionTaskRunTest extends IngestionTestBase getObjectMapper(), AuthTestUtils.TEST_AUTHORIZER_MAPPER, null, - rowIngestionMetersFactory + rowIngestionMetersFactory, + coordinatorClient, + segmentLoaderFactory, + retryPolicyFactory ); final CompactionTask compactionTask = builder @@ -156,7 +176,10 @@ public class CompactionTaskRunTest extends IngestionTestBase getObjectMapper(), AuthTestUtils.TEST_AUTHORIZER_MAPPER, null, - rowIngestionMetersFactory + rowIngestionMetersFactory, + coordinatorClient, + segmentLoaderFactory, + retryPolicyFactory ); final CompactionTask compactionTask1 = builder @@ -200,7 +223,10 @@ public class CompactionTaskRunTest extends IngestionTestBase getObjectMapper(), AuthTestUtils.TEST_AUTHORIZER_MAPPER, null, - rowIngestionMetersFactory + rowIngestionMetersFactory, + coordinatorClient, + segmentLoaderFactory, + retryPolicyFactory ); final CompactionTask compactionTask1 = builder @@ -248,7 +274,10 @@ public class CompactionTaskRunTest extends IngestionTestBase getObjectMapper(), AuthTestUtils.TEST_AUTHORIZER_MAPPER, null, - rowIngestionMetersFactory + rowIngestionMetersFactory, + coordinatorClient, + segmentLoaderFactory, + retryPolicyFactory ); // day segmentGranularity diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/common/task/CompactionTaskTest.java b/indexing-service/src/test/java/org/apache/druid/indexing/common/task/CompactionTaskTest.java index f759cc7c1c6..5117c1a31c8 100644 --- a/indexing-service/src/test/java/org/apache/druid/indexing/common/task/CompactionTaskTest.java +++ b/indexing-service/src/test/java/org/apache/druid/indexing/common/task/CompactionTaskTest.java @@ -28,6 +28,7 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; +import org.apache.druid.client.coordinator.CoordinatorClient; import org.apache.druid.data.input.FirehoseFactory; import org.apache.druid.data.input.impl.DimensionSchema; import org.apache.druid.data.input.impl.DimensionsSpec; @@ -41,6 +42,9 @@ import org.apache.druid.data.input.impl.TimeAndDimsParseSpec; import org.apache.druid.guice.GuiceAnnotationIntrospector; import org.apache.druid.guice.GuiceInjectableValues; import org.apache.druid.guice.GuiceInjectors; +import org.apache.druid.indexing.common.RetryPolicyConfig; +import org.apache.druid.indexing.common.RetryPolicyFactory; +import org.apache.druid.indexing.common.SegmentLoaderFactory; import org.apache.druid.indexing.common.TaskToolbox; import 
org.apache.druid.indexing.common.TestUtils; import org.apache.druid.indexing.common.actions.SegmentListUsedAction; @@ -153,12 +157,15 @@ public class CompactionTaskTest private static List AGGREGATORS; private static List SEGMENTS; private static RowIngestionMetersFactory rowIngestionMetersFactory = new TestUtils().getRowIngestionMetersFactory(); + private static Map segmentMap = new HashMap<>(); + private static CoordinatorClient coordinatorClient = new TestCoordinatorClient(segmentMap); private static ObjectMapper objectMapper = setupInjectablesInObjectMapper(new DefaultObjectMapper()); - private static Map segmentMap; + private static RetryPolicyFactory retryPolicyFactory = new RetryPolicyFactory(new RetryPolicyConfig()); private final boolean keepSegmentGranularity; private TaskToolbox toolbox; + private SegmentLoaderFactory segmentLoaderFactory; @BeforeClass public static void setupClass() @@ -202,7 +209,6 @@ public class CompactionTaskTest AGGREGATORS.add(new FloatFirstAggregatorFactory("agg_3", "float_dim_3")); AGGREGATORS.add(new DoubleLastAggregatorFactory("agg_4", "double_dim_4")); - segmentMap = new HashMap<>(SEGMENT_INTERVALS.size()); for (int i = 0; i < SEGMENT_INTERVALS.size(); i++) { final Interval segmentInterval = Intervals.of(StringUtils.format("2017-0%d-01/2017-0%d-01", (i + 1), (i + 2))); segmentMap.put( @@ -243,6 +249,8 @@ public class CompactionTaskTest binder.bind(AuthorizerMapper.class).toInstance(AuthTestUtils.TEST_AUTHORIZER_MAPPER); binder.bind(ChatHandlerProvider.class).toInstance(new NoopChatHandlerProvider()); binder.bind(RowIngestionMetersFactory.class).toInstance(rowIngestionMetersFactory); + binder.bind(CoordinatorClient.class).toInstance(coordinatorClient); + binder.bind(SegmentLoaderFactory.class).toInstance(new SegmentLoaderFactory(null, objectMapper)); } ) ) @@ -307,19 +315,21 @@ public class CompactionTaskTest @Before public void setup() { + final IndexIO testIndexIO = new TestIndexIO(objectMapper, segmentMap); toolbox = new TestTaskToolbox( new TestTaskActionClient(new ArrayList<>(segmentMap.keySet())), - new TestIndexIO(objectMapper, segmentMap), + testIndexIO, segmentMap ); + segmentLoaderFactory = new SegmentLoaderFactory(testIndexIO, objectMapper); } @Parameters(name = "keepSegmentGranularity={0}") public static Collection parameters() { return ImmutableList.of( - new Object[] {false}, - new Object[] {true} + new Object[]{false}, + new Object[]{true} ); } @@ -336,7 +346,10 @@ public class CompactionTaskTest objectMapper, AuthTestUtils.TEST_AUTHORIZER_MAPPER, null, - rowIngestionMetersFactory + rowIngestionMetersFactory, + coordinatorClient, + segmentLoaderFactory, + retryPolicyFactory ); final CompactionTask task = builder .interval(COMPACTION_INTERVAL) @@ -357,7 +370,10 @@ public class CompactionTaskTest objectMapper, AuthTestUtils.TEST_AUTHORIZER_MAPPER, null, - rowIngestionMetersFactory + rowIngestionMetersFactory, + coordinatorClient, + segmentLoaderFactory, + retryPolicyFactory ); final CompactionTask task = builder .segments(SEGMENTS) @@ -378,7 +394,10 @@ public class CompactionTaskTest objectMapper, AuthTestUtils.TEST_AUTHORIZER_MAPPER, null, - rowIngestionMetersFactory + rowIngestionMetersFactory, + coordinatorClient, + segmentLoaderFactory, + retryPolicyFactory ); final CompactionTask task = builder @@ -426,7 +445,10 @@ public class CompactionTaskTest null, keepSegmentGranularity, null, - objectMapper + objectMapper, + coordinatorClient, + segmentLoaderFactory, + retryPolicyFactory ); final List expectedDimensionsSpec = 
getExpectedDimensionsSpecForAutoGeneration( keepSegmentGranularity @@ -440,7 +462,13 @@ public class CompactionTaskTest ) ); Assert.assertEquals(6, ingestionSpecs.size()); - assertIngestionSchema(ingestionSpecs, expectedDimensionsSpec, AGGREGATORS, SEGMENT_INTERVALS, Granularities.MONTH); + assertIngestionSchema( + ingestionSpecs, + expectedDimensionsSpec, + AGGREGATORS, + SEGMENT_INTERVALS, + Granularities.MONTH + ); } else { Assert.assertEquals(1, ingestionSpecs.size()); assertIngestionSchema( @@ -491,7 +519,10 @@ public class CompactionTaskTest null, keepSegmentGranularity, null, - objectMapper + objectMapper, + coordinatorClient, + segmentLoaderFactory, + retryPolicyFactory ); final List expectedDimensionsSpec = getExpectedDimensionsSpecForAutoGeneration( keepSegmentGranularity @@ -564,7 +595,10 @@ public class CompactionTaskTest null, keepSegmentGranularity, null, - objectMapper + objectMapper, + coordinatorClient, + segmentLoaderFactory, + retryPolicyFactory ); final List expectedDimensionsSpec = getExpectedDimensionsSpecForAutoGeneration( keepSegmentGranularity @@ -637,7 +671,10 @@ public class CompactionTaskTest null, keepSegmentGranularity, null, - objectMapper + objectMapper, + coordinatorClient, + segmentLoaderFactory, + retryPolicyFactory ); final List expectedDimensionsSpec = getExpectedDimensionsSpecForAutoGeneration( keepSegmentGranularity @@ -710,7 +747,10 @@ public class CompactionTaskTest null, keepSegmentGranularity, null, - objectMapper + objectMapper, + coordinatorClient, + segmentLoaderFactory, + retryPolicyFactory ); if (keepSegmentGranularity) { @@ -760,7 +800,10 @@ public class CompactionTaskTest customMetricsSpec, keepSegmentGranularity, null, - objectMapper + objectMapper, + coordinatorClient, + segmentLoaderFactory, + retryPolicyFactory ); final List expectedDimensionsSpec = getExpectedDimensionsSpecForAutoGeneration( @@ -805,7 +848,10 @@ public class CompactionTaskTest null, keepSegmentGranularity, null, - objectMapper + objectMapper, + coordinatorClient, + segmentLoaderFactory, + retryPolicyFactory ); final List expectedDimensionsSpec = getExpectedDimensionsSpecForAutoGeneration( keepSegmentGranularity @@ -819,7 +865,13 @@ public class CompactionTaskTest ) ); Assert.assertEquals(6, ingestionSpecs.size()); - assertIngestionSchema(ingestionSpecs, expectedDimensionsSpec, AGGREGATORS, SEGMENT_INTERVALS, Granularities.MONTH); + assertIngestionSchema( + ingestionSpecs, + expectedDimensionsSpec, + AGGREGATORS, + SEGMENT_INTERVALS, + Granularities.MONTH + ); } else { Assert.assertEquals(1, ingestionSpecs.size()); assertIngestionSchema( @@ -850,7 +902,10 @@ public class CompactionTaskTest null, keepSegmentGranularity, null, - objectMapper + objectMapper, + coordinatorClient, + segmentLoaderFactory, + retryPolicyFactory ); } @@ -871,7 +926,10 @@ public class CompactionTaskTest null, keepSegmentGranularity, null, - objectMapper + objectMapper, + coordinatorClient, + segmentLoaderFactory, + retryPolicyFactory ); } @@ -886,7 +944,10 @@ public class CompactionTaskTest objectMapper, AuthTestUtils.TEST_AUTHORIZER_MAPPER, null, - rowIngestionMetersFactory + rowIngestionMetersFactory, + coordinatorClient, + segmentLoaderFactory, + retryPolicyFactory ); final CompactionTask task = builder @@ -934,7 +995,10 @@ public class CompactionTaskTest null, keepSegmentGranularity, null, - objectMapper + objectMapper, + coordinatorClient, + segmentLoaderFactory, + retryPolicyFactory ); } @@ -949,7 +1013,10 @@ public class CompactionTaskTest null, null, new 
PeriodGranularity(Period.months(3), null, null), - objectMapper + objectMapper, + coordinatorClient, + segmentLoaderFactory, + retryPolicyFactory ); final List expectedDimensionsSpec = ImmutableList.of( new DimensionsSpec(getDimensionSchema(new DoubleDimensionSchema("string_to_double"))) @@ -982,7 +1049,10 @@ public class CompactionTaskTest null, false, new PeriodGranularity(Period.months(3), null, null), - objectMapper + objectMapper, + coordinatorClient, + segmentLoaderFactory, + retryPolicyFactory ); final List expectedDimensionsSpec = ImmutableList.of( new DimensionsSpec(getDimensionSchema(new DoubleDimensionSchema("string_to_double"))) @@ -1015,7 +1085,10 @@ public class CompactionTaskTest null, null, null, - objectMapper + objectMapper, + coordinatorClient, + segmentLoaderFactory, + retryPolicyFactory ); final List expectedDimensionsSpec = getExpectedDimensionsSpecForAutoGeneration( true @@ -1048,7 +1121,10 @@ public class CompactionTaskTest objectMapper, AuthTestUtils.TEST_AUTHORIZER_MAPPER, null, - rowIngestionMetersFactory + rowIngestionMetersFactory, + coordinatorClient, + segmentLoaderFactory, + retryPolicyFactory ); final CompactionTask task = builder .interval(COMPACTION_INTERVAL) @@ -1222,6 +1298,23 @@ public class CompactionTaskTest } } + private static class TestCoordinatorClient extends CoordinatorClient + { + private final Map segmentMap; + + TestCoordinatorClient(Map segmentMap) + { + super(null, null); + this.segmentMap = segmentMap; + } + + @Override + public List getDatabaseSegmentDataSourceSegments(String dataSource, List intervals) + { + return new ArrayList<>(segmentMap.keySet()); + } + } + private static class TestTaskToolbox extends TaskToolbox { private final Map segmentFileMap; diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/common/task/RealtimeIndexTaskTest.java b/indexing-service/src/test/java/org/apache/druid/indexing/common/task/RealtimeIndexTaskTest.java index 2c3d3c3d873..2db92726b4c 100644 --- a/indexing-service/src/test/java/org/apache/druid/indexing/common/task/RealtimeIndexTaskTest.java +++ b/indexing-service/src/test/java/org/apache/druid/indexing/common/task/RealtimeIndexTaskTest.java @@ -107,7 +107,6 @@ import org.apache.druid.segment.indexing.RealtimeIOConfig; import org.apache.druid.segment.indexing.RealtimeTuningConfig; import org.apache.druid.segment.indexing.granularity.UniformGranularitySpec; import org.apache.druid.segment.loading.SegmentLoaderConfig; -import org.apache.druid.segment.loading.SegmentLoaderLocalCacheManager; import org.apache.druid.segment.loading.StorageLocationConfig; import org.apache.druid.segment.realtime.FireDepartment; import org.apache.druid.segment.realtime.plumber.SegmentHandoffNotifier; @@ -1076,9 +1075,7 @@ public class RealtimeIndexTaskTest () -> conglomerate, Execs.directExecutor(), // queryExecutorService EasyMock.createMock(MonitorScheduler.class), - new SegmentLoaderFactory( - new SegmentLoaderLocalCacheManager(null, segmentLoaderConfig, testUtils.getTestObjectMapper()) - ), + new SegmentLoaderFactory(null, testUtils.getTestObjectMapper()), testUtils.getTestObjectMapper(), testUtils.getTestIndexIO(), MapCache.create(1024), diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/firehose/IngestSegmentFirehoseFactoryTest.java b/indexing-service/src/test/java/org/apache/druid/indexing/firehose/IngestSegmentFirehoseFactoryTest.java index 3329419cf33..7f44ad6a528 100644 --- a/indexing-service/src/test/java/org/apache/druid/indexing/firehose/IngestSegmentFirehoseFactoryTest.java 
+++ b/indexing-service/src/test/java/org/apache/druid/indexing/firehose/IngestSegmentFirehoseFactoryTest.java @@ -29,6 +29,9 @@ import com.google.common.collect.ImmutableSet; import com.google.common.io.Files; import com.google.inject.Binder; import com.google.inject.Module; +import org.apache.druid.client.coordinator.CoordinatorClient; +import org.apache.druid.data.input.Firehose; +import org.apache.druid.data.input.FirehoseFactory; import org.apache.druid.data.input.InputRow; import org.apache.druid.data.input.impl.DimensionsSpec; import org.apache.druid.data.input.impl.InputRowParser; @@ -39,21 +42,16 @@ import org.apache.druid.data.input.impl.TimestampSpec; import org.apache.druid.guice.GuiceAnnotationIntrospector; import org.apache.druid.guice.GuiceInjectableValues; import org.apache.druid.guice.GuiceInjectors; +import org.apache.druid.indexing.common.RetryPolicyConfig; +import org.apache.druid.indexing.common.RetryPolicyFactory; import org.apache.druid.indexing.common.SegmentLoaderFactory; -import org.apache.druid.indexing.common.TaskToolboxFactory; import org.apache.druid.indexing.common.TestUtils; -import org.apache.druid.indexing.common.actions.LocalTaskActionClientFactory; -import org.apache.druid.indexing.common.actions.TaskActionToolbox; -import org.apache.druid.indexing.common.actions.TaskAuditLogConfig; -import org.apache.druid.indexing.common.config.TaskConfig; import org.apache.druid.indexing.common.config.TaskStorageConfig; import org.apache.druid.indexing.common.task.NoopTask; -import org.apache.druid.indexing.common.task.NoopTestTaskFileWriter; import org.apache.druid.indexing.common.task.Task; import org.apache.druid.indexing.overlord.HeapMemoryTaskStorage; import org.apache.druid.indexing.overlord.TaskLockbox; import org.apache.druid.indexing.overlord.TaskStorage; -import org.apache.druid.indexing.overlord.supervisor.SupervisorManager; import org.apache.druid.java.util.common.IOE; import org.apache.druid.java.util.common.Intervals; import org.apache.druid.java.util.common.JodaUtils; @@ -61,7 +59,6 @@ import org.apache.druid.java.util.common.StringUtils; import org.apache.druid.java.util.common.logger.Logger; import org.apache.druid.java.util.emitter.service.ServiceEmitter; import org.apache.druid.math.expr.ExprMacroTable; -import org.apache.druid.metadata.IndexerSQLMetadataStorageCoordinator; import org.apache.druid.query.aggregation.DoubleSumAggregatorFactory; import org.apache.druid.query.aggregation.LongSumAggregatorFactory; import org.apache.druid.query.filter.SelectorDimFilter; @@ -72,16 +69,9 @@ import org.apache.druid.segment.TestHelper; import org.apache.druid.segment.column.ColumnHolder; import org.apache.druid.segment.incremental.IncrementalIndex; import org.apache.druid.segment.incremental.IncrementalIndexSchema; -import org.apache.druid.segment.loading.DataSegmentArchiver; -import org.apache.druid.segment.loading.DataSegmentKiller; -import org.apache.druid.segment.loading.DataSegmentMover; -import org.apache.druid.segment.loading.DataSegmentPusher; import org.apache.druid.segment.loading.LocalDataSegmentPuller; import org.apache.druid.segment.loading.LocalLoadSpec; -import org.apache.druid.segment.loading.SegmentLoaderConfig; -import org.apache.druid.segment.loading.SegmentLoaderLocalCacheManager; -import org.apache.druid.segment.loading.StorageLocationConfig; -import org.apache.druid.segment.realtime.firehose.IngestSegmentFirehose; +import org.apache.druid.segment.realtime.firehose.CombiningFirehoseFactory; import 
org.apache.druid.segment.realtime.plumber.SegmentHandoffNotifierFactory; import org.apache.druid.segment.transform.ExpressionTransform; import org.apache.druid.segment.transform.TransformSpec; @@ -103,15 +93,12 @@ import org.junit.runners.Parameterized; import java.io.File; import java.io.IOException; -import java.net.URI; import java.nio.file.Paths; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; -import java.util.HashSet; import java.util.List; import java.util.Map; -import java.util.Set; import java.util.stream.Collectors; import java.util.stream.IntStream; @@ -170,157 +157,21 @@ public class IngestSegmentFirehoseFactoryTest } INDEX_MERGER_V9.persist(index, persistDir, indexSpec, null); - final IndexerSQLMetadataStorageCoordinator mdc = new IndexerSQLMetadataStorageCoordinator(null, null, null) + final CoordinatorClient cc = new CoordinatorClient(null, null) { - private final Set published = new HashSet<>(); - @Override - public List getUsedSegmentsForInterval(String dataSource, Interval interval) + public List getDatabaseSegmentDataSourceSegments(String dataSource, List intervals) { return ImmutableList.copyOf(segmentSet); } - - @Override - public List getUsedSegmentsForIntervals(String dataSource, List interval) - { - return ImmutableList.copyOf(segmentSet); - } - - @Override - public List getUnusedSegmentsForInterval(String dataSource, Interval interval) - { - return ImmutableList.of(); - } - - @Override - public Set announceHistoricalSegments(Set segments) - { - Set added = new HashSet<>(); - for (final DataSegment segment : segments) { - if (published.add(segment)) { - added.add(segment); - } - } - - return ImmutableSet.copyOf(added); - } - - @Override - public void deleteSegments(Set segments) - { - // do nothing - } }; - final LocalTaskActionClientFactory tac = new LocalTaskActionClientFactory( - TASK_STORAGE, - new TaskActionToolbox( - TASK_LOCKBOX, - TASK_STORAGE, - mdc, - newMockEmitter(), - EasyMock.createMock(SupervisorManager.class) - ), - new TaskAuditLogConfig(false) - ); + SegmentHandoffNotifierFactory notifierFactory = EasyMock.createNiceMock(SegmentHandoffNotifierFactory.class); EasyMock.replay(notifierFactory); - SegmentLoaderConfig segmentLoaderConfig = new SegmentLoaderConfig() - { - @Override - public List getLocations() - { - return new ArrayList<>(); - } - }; - final TaskToolboxFactory taskToolboxFactory = new TaskToolboxFactory( - new TaskConfig(tmpDir.getAbsolutePath(), null, null, 50000, null, false, null, null), - tac, - newMockEmitter(), - new DataSegmentPusher() - { - @Deprecated - @Override - public String getPathForHadoop(String dataSource) - { - return getPathForHadoop(); - } + final SegmentLoaderFactory slf = new SegmentLoaderFactory(null, MAPPER); + final RetryPolicyFactory retryPolicyFactory = new RetryPolicyFactory(new RetryPolicyConfig()); - @Override - public String getPathForHadoop() - { - throw new UnsupportedOperationException(); - } - - @Override - public DataSegment push(File file, DataSegment segment, boolean useUniquePath) - { - return segment; - } - - @Override - public Map makeLoadSpec(URI uri) - { - throw new UnsupportedOperationException(); - } - }, - new DataSegmentKiller() - { - @Override - public void kill(DataSegment segments) - { - - } - - @Override - public void killAll() - { - throw new UnsupportedOperationException("not implemented"); - } - }, - new DataSegmentMover() - { - @Override - public DataSegment move(DataSegment dataSegment, Map targetLoadSpec) - { - return dataSegment; - } - }, - new 
DataSegmentArchiver() - { - @Override - public DataSegment archive(DataSegment segment) - { - return segment; - } - - @Override - public DataSegment restore(DataSegment segment) - { - return segment; - } - }, - null, // segment announcer - null, - notifierFactory, - null, // query runner factory conglomerate corporation unionized collective - null, // query executor service - null, // monitor scheduler - new SegmentLoaderFactory( - new SegmentLoaderLocalCacheManager(null, segmentLoaderConfig, MAPPER) - ), - MAPPER, - INDEX_IO, - null, - null, - null, - INDEX_MERGER_V9, - null, - null, - null, - null, - new NoopTestTaskFileWriter() - ); Collection values = new ArrayList<>(); for (InputRowParser parser : Arrays.asList( ROW_PARSER, @@ -342,27 +193,35 @@ public class IngestSegmentFirehoseFactoryTest null, ImmutableList.of(METRIC_LONG_NAME, METRIC_FLOAT_NAME) )) { - final IngestSegmentFirehoseFactory factory = new IngestSegmentFirehoseFactory( - TASK.getDataSource(), - Intervals.ETERNITY, - new SelectorDimFilter(DIM_NAME, DIM_VALUE, null), - dim_names, - metric_names, - INDEX_IO - ); - factory.setTaskToolbox(taskToolboxFactory.build(TASK)); - values.add( - new Object[]{ - StringUtils.format( - "DimNames[%s]MetricNames[%s]ParserDimNames[%s]", - dim_names == null ? "null" : "dims", - metric_names == null ? "null" : "metrics", - parser == ROW_PARSER ? "dims" : "null" - ), - factory, - parser - } - ); + for (Boolean wrapInCombining : Arrays.asList(false, true)) { + final IngestSegmentFirehoseFactory isfFactory = new IngestSegmentFirehoseFactory( + TASK.getDataSource(), + Intervals.ETERNITY, + new SelectorDimFilter(DIM_NAME, DIM_VALUE, null), + dim_names, + metric_names, + INDEX_IO, + cc, + slf, + retryPolicyFactory + ); + final FirehoseFactory factory = wrapInCombining + ? new CombiningFirehoseFactory(ImmutableList.of(isfFactory)) + : isfFactory; + values.add( + new Object[]{ + StringUtils.format( + "DimNames[%s]MetricNames[%s]ParserDimNames[%s]WrapInCombining[%s]", + dim_names == null ? "null" : "dims", + metric_names == null ? "null" : "metrics", + parser == ROW_PARSER ? "dims" : "null", + wrapInCombining + ), + factory, + parser + } + ); + } } } } @@ -407,7 +266,7 @@ public class IngestSegmentFirehoseFactoryTest public IngestSegmentFirehoseFactoryTest( String testName, - IngestSegmentFirehoseFactory factory, + FirehoseFactory factory, InputRowParser rowParser ) { @@ -436,7 +295,7 @@ public class IngestSegmentFirehoseFactoryTest private static final File persistDir = Paths.get(tmpDir.getAbsolutePath(), "indexTestMerger").toFile(); private static final List segmentSet = new ArrayList<>(MAX_SHARD_NUMBER); - private final IngestSegmentFirehoseFactory factory; + private final FirehoseFactory factory; private final InputRowParser rowParser; private static final InputRowParser> ROW_PARSER = new MapInputRowParser( @@ -518,15 +377,20 @@ public class IngestSegmentFirehoseFactoryTest @Test public void sanityTest() { - Assert.assertEquals(TASK.getDataSource(), factory.getDataSource()); - if (factory.getDimensions() != null) { - Assert.assertArrayEquals(new String[]{DIM_NAME}, factory.getDimensions().toArray()); + if (factory instanceof CombiningFirehoseFactory) { + // This method tests IngestSegmentFirehoseFactory-specific methods. 
+ return; } - Assert.assertEquals(Intervals.ETERNITY, factory.getInterval()); - if (factory.getMetrics() != null) { + final IngestSegmentFirehoseFactory isfFactory = (IngestSegmentFirehoseFactory) factory; + Assert.assertEquals(TASK.getDataSource(), isfFactory.getDataSource()); + if (isfFactory.getDimensions() != null) { + Assert.assertArrayEquals(new String[]{DIM_NAME}, isfFactory.getDimensions().toArray()); + } + Assert.assertEquals(Intervals.ETERNITY, isfFactory.getInterval()); + if (isfFactory.getMetrics() != null) { Assert.assertEquals( ImmutableSet.of(METRIC_LONG_NAME, METRIC_FLOAT_NAME), - ImmutableSet.copyOf(factory.getMetrics()) + ImmutableSet.copyOf(isfFactory.getMetrics()) ); } } @@ -536,15 +400,17 @@ public class IngestSegmentFirehoseFactoryTest { Assert.assertEquals(MAX_SHARD_NUMBER.longValue(), segmentSet.size()); Integer rowcount = 0; - try (final IngestSegmentFirehose firehose = - (IngestSegmentFirehose) - factory.connect(rowParser, null)) { + try (final Firehose firehose = factory.connect(rowParser, null)) { while (firehose.hasMore()) { InputRow row = firehose.nextRow(); Assert.assertArrayEquals(new String[]{DIM_NAME}, row.getDimensions().toArray()); Assert.assertArrayEquals(new String[]{DIM_VALUE}, row.getDimension(DIM_NAME).toArray()); Assert.assertEquals(METRIC_LONG_VALUE.longValue(), row.getMetric(METRIC_LONG_NAME)); - Assert.assertEquals(METRIC_FLOAT_VALUE, row.getMetric(METRIC_FLOAT_NAME).floatValue(), METRIC_FLOAT_VALUE * 0.0001); + Assert.assertEquals( + METRIC_FLOAT_VALUE, + row.getMetric(METRIC_FLOAT_NAME).floatValue(), + METRIC_FLOAT_VALUE * 0.0001 + ); ++rowcount; } } @@ -563,9 +429,8 @@ public class IngestSegmentFirehoseFactoryTest ) ); int skipped = 0; - try (final IngestSegmentFirehose firehose = - (IngestSegmentFirehose) - factory.connect(transformSpec.decorate(rowParser), null)) { + try (final Firehose firehose = + factory.connect(transformSpec.decorate(rowParser), null)) { while (firehose.hasMore()) { InputRow row = firehose.nextRow(); if (row == null) { diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/firehose/IngestSegmentFirehoseFactoryTimelineTest.java b/indexing-service/src/test/java/org/apache/druid/indexing/firehose/IngestSegmentFirehoseFactoryTimelineTest.java index 70e877bad45..8ca24d95d72 100644 --- a/indexing-service/src/test/java/org/apache/druid/indexing/firehose/IngestSegmentFirehoseFactoryTimelineTest.java +++ b/indexing-service/src/test/java/org/apache/druid/indexing/firehose/IngestSegmentFirehoseFactoryTimelineTest.java @@ -26,6 +26,7 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import com.google.common.io.Files; import org.apache.commons.io.FileUtils; +import org.apache.druid.client.coordinator.CoordinatorClient; import org.apache.druid.data.input.Firehose; import org.apache.druid.data.input.InputRow; import org.apache.druid.data.input.MapBasedInputRow; @@ -34,20 +35,10 @@ import org.apache.druid.data.input.impl.InputRowParser; import org.apache.druid.data.input.impl.JSONParseSpec; import org.apache.druid.data.input.impl.MapInputRowParser; import org.apache.druid.data.input.impl.TimestampSpec; +import org.apache.druid.indexing.common.RetryPolicyConfig; +import org.apache.druid.indexing.common.RetryPolicyFactory; import org.apache.druid.indexing.common.SegmentLoaderFactory; -import org.apache.druid.indexing.common.TaskLock; -import org.apache.druid.indexing.common.TaskLockType; -import org.apache.druid.indexing.common.TaskToolboxFactory; import 
org.apache.druid.indexing.common.TestUtils; -import org.apache.druid.indexing.common.actions.LockAcquireAction; -import org.apache.druid.indexing.common.actions.SegmentListUsedAction; -import org.apache.druid.indexing.common.actions.TaskAction; -import org.apache.druid.indexing.common.actions.TaskActionClient; -import org.apache.druid.indexing.common.actions.TaskActionClientFactory; -import org.apache.druid.indexing.common.config.TaskConfig; -import org.apache.druid.indexing.common.task.NoopTask; -import org.apache.druid.indexing.common.task.NoopTestTaskFileWriter; -import org.apache.druid.indexing.common.task.Task; import org.apache.druid.java.util.common.DateTimes; import org.apache.druid.java.util.common.Intervals; import org.apache.druid.java.util.common.JodaUtils; @@ -59,12 +50,8 @@ import org.apache.druid.segment.IndexSpec; import org.apache.druid.segment.incremental.IncrementalIndex; import org.apache.druid.segment.incremental.IncrementalIndexSchema; import org.apache.druid.segment.incremental.IndexSizeExceededException; -import org.apache.druid.segment.loading.SegmentLoaderConfig; -import org.apache.druid.segment.loading.SegmentLoaderLocalCacheManager; -import org.apache.druid.segment.loading.StorageLocationConfig; import org.apache.druid.segment.realtime.plumber.SegmentHandoffNotifierFactory; import org.apache.druid.segment.transform.TransformSpec; -import org.apache.druid.server.metrics.NoopServiceEmitter; import org.apache.druid.timeline.DataSegment; import org.apache.druid.timeline.SegmentId; import org.apache.druid.timeline.partition.LinearShardSpec; @@ -165,7 +152,7 @@ public class IngestSegmentFirehoseFactoryTimelineTest FileUtils.deleteDirectory(tmpDir); } - private static TestCase TC( + private static TestCase tc( String intervalString, int expectedCount, long expectedSum, @@ -187,7 +174,7 @@ public class IngestSegmentFirehoseFactoryTimelineTest ); } - private static DataSegmentMaker DS( + private static DataSegmentMaker ds( String intervalString, String version, int partitionNum, @@ -197,7 +184,7 @@ public class IngestSegmentFirehoseFactoryTimelineTest return new DataSegmentMaker(Intervals.of(intervalString), version, partitionNum, Arrays.asList(rows)); } - private static InputRow IR(String timeString, long metricValue) + private static InputRow ir(String timeString, long metricValue) { return new MapBasedInputRow( DateTimes.of(timeString).getMillis(), @@ -249,115 +236,68 @@ public class IngestSegmentFirehoseFactoryTimelineTest public static Collection constructorFeeder() { final List testCases = ImmutableList.of( - TC( + tc( "2000/2000T02", 3, 7, - DS("2000/2000T01", "v1", 0, IR("2000", 1), IR("2000T00:01", 2)), - DS("2000T01/2000T02", "v1", 0, IR("2000T01", 4)) + ds("2000/2000T01", "v1", 0, ir("2000", 1), ir("2000T00:01", 2)), + ds("2000T01/2000T02", "v1", 0, ir("2000T01", 4)) ) /* Adjacent segments */, - TC( + tc( "2000/2000T02", 3, 7, - DS("2000/2000T02", "v1", 0, IR("2000", 1), IR("2000T00:01", 2), IR("2000T01", 8)), - DS("2000T01/2000T02", "v2", 0, IR("2000T01:01", 4)) + ds("2000/2000T02", "v1", 0, ir("2000", 1), ir("2000T00:01", 2), ir("2000T01", 8)), + ds("2000T01/2000T02", "v2", 0, ir("2000T01:01", 4)) ) /* 1H segment overlaid on top of 2H segment */, - TC( + tc( "2000/2000-01-02", 4, 23, - DS("2000/2000-01-02", "v1", 0, IR("2000", 1), IR("2000T00:01", 2), IR("2000T01", 8), IR("2000T02", 16)), - DS("2000T01/2000T02", "v2", 0, IR("2000T01:01", 4)) + ds("2000/2000-01-02", "v1", 0, ir("2000", 1), ir("2000T00:01", 2), ir("2000T01", 8), ir("2000T02", 16)), + 
ds("2000T01/2000T02", "v2", 0, ir("2000T01:01", 4)) ) /* 1H segment overlaid on top of 1D segment */, - TC( + tc( "2000/2000T02", 4, 15, - DS("2000/2000T02", "v1", 0, IR("2000", 1), IR("2000T00:01", 2), IR("2000T01", 8)), - DS("2000/2000T02", "v1", 1, IR("2000T01:01", 4)) + ds("2000/2000T02", "v1", 0, ir("2000", 1), ir("2000T00:01", 2), ir("2000T01", 8)), + ds("2000/2000T02", "v1", 1, ir("2000T01:01", 4)) ) /* Segment set with two segments for the same interval */, - TC( + tc( "2000T01/2000T02", 1, 2, - DS("2000/2000T03", "v1", 0, IR("2000", 1), IR("2000T01", 2), IR("2000T02", 4)) + ds("2000/2000T03", "v1", 0, ir("2000", 1), ir("2000T01", 2), ir("2000T02", 4)) ) /* Segment wider than desired interval */, - TC( + tc( "2000T02/2000T04", 2, 12, - DS("2000/2000T03", "v1", 0, IR("2000", 1), IR("2000T01", 2), IR("2000T02", 4)), - DS("2000T03/2000T04", "v1", 0, IR("2000T03", 8)) + ds("2000/2000T03", "v1", 0, ir("2000", 1), ir("2000T01", 2), ir("2000T02", 4)), + ds("2000T03/2000T04", "v1", 0, ir("2000T03", 8)) ) /* Segment intersecting desired interval */ ); final List constructors = new ArrayList<>(); for (final TestCase testCase : testCases) { - final TaskActionClient taskActionClient = new TaskActionClient() + SegmentHandoffNotifierFactory notifierFactory = EasyMock.createNiceMock(SegmentHandoffNotifierFactory.class); + EasyMock.replay(notifierFactory); + final SegmentLoaderFactory slf = new SegmentLoaderFactory(null, MAPPER); + final RetryPolicyFactory retryPolicyFactory = new RetryPolicyFactory(new RetryPolicyConfig()); + final CoordinatorClient cc = new CoordinatorClient(null, null) { @Override - public RetType submit(TaskAction taskAction) + public List getDatabaseSegmentDataSourceSegments(String dataSource, List intervals) { - if (taskAction instanceof SegmentListUsedAction) { - // Expect the interval we asked for - final SegmentListUsedAction action = (SegmentListUsedAction) taskAction; - if (action.getIntervals().equals(ImmutableList.of(testCase.interval))) { - return (RetType) ImmutableList.copyOf(testCase.segments); - } else { - throw new IllegalArgumentException("WTF"); - } - } else if (taskAction instanceof LockAcquireAction) { - return (RetType) new TaskLock(TaskLockType.EXCLUSIVE, null, DATA_SOURCE, Intervals.of("2000/2001"), "v1", 0); + // Expect the interval we asked for + if (intervals.equals(ImmutableList.of(testCase.interval))) { + return ImmutableList.copyOf(testCase.segments); } else { - throw new UnsupportedOperationException(); + throw new IllegalArgumentException("WTF"); } } }; - SegmentHandoffNotifierFactory notifierFactory = EasyMock.createNiceMock(SegmentHandoffNotifierFactory.class); - EasyMock.replay(notifierFactory); - SegmentLoaderConfig segmentLoaderConfig = new SegmentLoaderConfig() - { - @Override - public List getLocations() - { - return new ArrayList<>(); - } - }; - final TaskToolboxFactory taskToolboxFactory = new TaskToolboxFactory( - new TaskConfig(testCase.tmpDir.getAbsolutePath(), null, null, 50000, null, false, null, null), - new TaskActionClientFactory() - { - @Override - public TaskActionClient create(Task task) - { - return taskActionClient; - } - }, - new NoopServiceEmitter(), - null, // segment pusher - null, // segment killer - null, // segment mover - null, // segment archiver - null, // segment announcer, - null, - notifierFactory, - null, // query runner factory conglomerate corporation unionized collective - null, // query executor service - null, // monitor scheduler - new SegmentLoaderFactory( - new SegmentLoaderLocalCacheManager(null, 
segmentLoaderConfig, MAPPER) - ), - MAPPER, - INDEX_IO, - null, - null, - null, - INDEX_MERGER_V9, - null, - null, - null, - null, - new NoopTestTaskFileWriter() - ); final IngestSegmentFirehoseFactory factory = new IngestSegmentFirehoseFactory( DATA_SOURCE, testCase.interval, new TrueDimFilter(), Arrays.asList(DIMENSIONS), Arrays.asList(METRICS), - INDEX_IO + INDEX_IO, + cc, + slf, + retryPolicyFactory ); - factory.setTaskToolbox(taskToolboxFactory.build(NoopTask.create(DATA_SOURCE))); constructors.add( new Object[]{ diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/overlord/SingleTaskBackgroundRunnerTest.java b/indexing-service/src/test/java/org/apache/druid/indexing/overlord/SingleTaskBackgroundRunnerTest.java index c8f9380714b..49315d3973d 100644 --- a/indexing-service/src/test/java/org/apache/druid/indexing/overlord/SingleTaskBackgroundRunnerTest.java +++ b/indexing-service/src/test/java/org/apache/druid/indexing/overlord/SingleTaskBackgroundRunnerTest.java @@ -37,7 +37,6 @@ import org.apache.druid.segment.loading.NoopDataSegmentArchiver; import org.apache.druid.segment.loading.NoopDataSegmentKiller; import org.apache.druid.segment.loading.NoopDataSegmentMover; import org.apache.druid.segment.loading.NoopDataSegmentPusher; -import org.apache.druid.segment.loading.SegmentLoaderLocalCacheManager; import org.apache.druid.server.DruidNode; import org.apache.druid.server.coordination.NoopDataSegmentAnnouncer; import org.apache.druid.server.initialization.ServerConfig; @@ -94,7 +93,7 @@ public class SingleTaskBackgroundRunnerTest null, null, null, - new SegmentLoaderFactory(EasyMock.createMock(SegmentLoaderLocalCacheManager.class)), + new SegmentLoaderFactory(null, utils.getTestObjectMapper()), utils.getTestObjectMapper(), utils.getTestIndexIO(), null, diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/overlord/TaskLifecycleTest.java b/indexing-service/src/test/java/org/apache/druid/indexing/overlord/TaskLifecycleTest.java index 265150c6056..d7e51fa9429 100644 --- a/indexing-service/src/test/java/org/apache/druid/indexing/overlord/TaskLifecycleTest.java +++ b/indexing-service/src/test/java/org/apache/druid/indexing/overlord/TaskLifecycleTest.java @@ -106,7 +106,6 @@ import org.apache.druid.segment.loading.DataSegmentPusher; import org.apache.druid.segment.loading.LocalDataSegmentKiller; import org.apache.druid.segment.loading.LocalDataSegmentPusherConfig; import org.apache.druid.segment.loading.SegmentLoaderConfig; -import org.apache.druid.segment.loading.SegmentLoaderLocalCacheManager; import org.apache.druid.segment.loading.StorageLocationConfig; import org.apache.druid.segment.realtime.FireDepartment; import org.apache.druid.segment.realtime.FireDepartmentTest; @@ -195,16 +194,16 @@ public class TaskLifecycleTest private static DateTime now = DateTimes.nowUtc(); private static final Iterable realtimeIdxTaskInputRows = ImmutableList.of( - IR(now.toString("YYYY-MM-dd'T'HH:mm:ss"), "test_dim1", "test_dim2", 1.0f), - IR(now.plus(new Period(Hours.ONE)).toString("YYYY-MM-dd'T'HH:mm:ss"), "test_dim1", "test_dim2", 2.0f), - IR(now.plus(new Period(Hours.TWO)).toString("YYYY-MM-dd'T'HH:mm:ss"), "test_dim1", "test_dim2", 3.0f) + ir(now.toString("YYYY-MM-dd'T'HH:mm:ss"), "test_dim1", "test_dim2", 1.0f), + ir(now.plus(new Period(Hours.ONE)).toString("YYYY-MM-dd'T'HH:mm:ss"), "test_dim1", "test_dim2", 2.0f), + ir(now.plus(new Period(Hours.TWO)).toString("YYYY-MM-dd'T'HH:mm:ss"), "test_dim1", "test_dim2", 3.0f) ); private static final Iterable IdxTaskInputRows = 
ImmutableList.of( - IR("2010-01-01T01", "x", "y", 1), - IR("2010-01-01T01", "x", "z", 1), - IR("2010-01-02T01", "a", "b", 2), - IR("2010-01-02T01", "a", "c", 1) + ir("2010-01-01T01", "x", "y", 1), + ir("2010-01-01T01", "x", "z", 1), + ir("2010-01-02T01", "a", "b", 2), + ir("2010-01-02T01", "a", "c", 1) ); @Rule @@ -241,7 +240,7 @@ public class TaskLifecycleTest return new NoopServiceEmitter(); } - private static InputRow IR(String dt, String dim1, String dim2, float met) + private static InputRow ir(String dt, String dim1, String dim2, float met) { return new MapBasedInputRow( DateTimes.of(dt).getMillis(), @@ -611,9 +610,7 @@ public class TaskLifecycleTest () -> queryRunnerFactoryConglomerate, // query runner factory conglomerate corporation unionized collective Execs.directExecutor(), // query executor service monitorScheduler, // monitor scheduler - new SegmentLoaderFactory( - new SegmentLoaderLocalCacheManager(null, segmentLoaderConfig, new DefaultObjectMapper()) - ), + new SegmentLoaderFactory(null, new DefaultObjectMapper()), MAPPER, INDEX_IO, MapCache.create(0), diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/worker/WorkerTaskManagerTest.java b/indexing-service/src/test/java/org/apache/druid/indexing/worker/WorkerTaskManagerTest.java index 749d44af805..b86b654847a 100644 --- a/indexing-service/src/test/java/org/apache/druid/indexing/worker/WorkerTaskManagerTest.java +++ b/indexing-service/src/test/java/org/apache/druid/indexing/worker/WorkerTaskManagerTest.java @@ -41,7 +41,6 @@ import org.apache.druid.indexing.overlord.TestTaskRunner; import org.apache.druid.segment.IndexIO; import org.apache.druid.segment.IndexMergerV9; import org.apache.druid.segment.loading.SegmentLoaderConfig; -import org.apache.druid.segment.loading.SegmentLoaderLocalCacheManager; import org.apache.druid.segment.loading.StorageLocationConfig; import org.apache.druid.segment.realtime.plumber.SegmentHandoffNotifierFactory; import org.apache.druid.server.coordination.ChangeRequestHistory; @@ -120,7 +119,7 @@ public class WorkerTaskManagerTest null, null, null, - new SegmentLoaderFactory(new SegmentLoaderLocalCacheManager(null, loaderConfig, jsonMapper)), + new SegmentLoaderFactory(null, jsonMapper), jsonMapper, indexIO, null, diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/worker/WorkerTaskMonitorTest.java b/indexing-service/src/test/java/org/apache/druid/indexing/worker/WorkerTaskMonitorTest.java index 20eb5fc3d67..4afdd3cf9d5 100644 --- a/indexing-service/src/test/java/org/apache/druid/indexing/worker/WorkerTaskMonitorTest.java +++ b/indexing-service/src/test/java/org/apache/druid/indexing/worker/WorkerTaskMonitorTest.java @@ -46,9 +46,6 @@ import org.apache.druid.indexing.overlord.TestRemoteTaskRunnerConfig; import org.apache.druid.java.util.common.StringUtils; import org.apache.druid.segment.IndexIO; import org.apache.druid.segment.IndexMergerV9; -import org.apache.druid.segment.loading.SegmentLoaderConfig; -import org.apache.druid.segment.loading.SegmentLoaderLocalCacheManager; -import org.apache.druid.segment.loading.StorageLocationConfig; import org.apache.druid.segment.realtime.plumber.SegmentHandoffNotifierFactory; import org.apache.druid.server.DruidNode; import org.apache.druid.server.initialization.IndexerZkConfig; @@ -62,10 +59,10 @@ import org.junit.Assert; import org.junit.Before; import org.junit.Test; -import java.util.ArrayList; import java.util.List; /** + * */ public class WorkerTaskMonitorTest { @@ -169,20 +166,8 @@ public class WorkerTaskMonitorTest 
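The hunk below repeats the substitution made throughout this patch's test fixtures: the hand-built SegmentLoaderLocalCacheManager, along with its inline SegmentLoaderConfig override, collapses into a SegmentLoaderFactory constructed from an IndexIO and an ObjectMapper. A sketch of the new wiring, under stated assumptions: the first constructor argument is the IndexIO (null is acceptable in tests that never load a segment), and manufacturate() is the factory method that produces a loader rooted at a caller-supplied directory:

    // Test-side wiring: no IndexIO needed because nothing is actually loaded.
    final SegmentLoaderFactory slf = new SegmentLoaderFactory(null, jsonMapper);
    // A peon would then obtain a task-scoped loader, e.g.:
    // final SegmentLoader loader = slf.manufacturate(taskWorkingDir);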
new TaskToolboxFactory( taskConfig, taskActionClientFactory, - null, null, null, null, null, null, null, notifierFactory, null, null, null, new SegmentLoaderFactory( - new SegmentLoaderLocalCacheManager( - null, - new SegmentLoaderConfig() - { - @Override - public List getLocations() - { - return new ArrayList<>(); - } - }, - jsonMapper - ) - ), + null, null, null, null, null, null, null, notifierFactory, null, null, null, + new SegmentLoaderFactory(null, jsonMapper), jsonMapper, indexIO, null, diff --git a/processing/src/main/java/org/apache/druid/query/metadata/SegmentAnalyzer.java b/processing/src/main/java/org/apache/druid/query/metadata/SegmentAnalyzer.java index 8db528560e4..15acc4ce7ab 100644 --- a/processing/src/main/java/org/apache/druid/query/metadata/SegmentAnalyzer.java +++ b/processing/src/main/java/org/apache/druid/query/metadata/SegmentAnalyzer.java @@ -42,6 +42,7 @@ import org.apache.druid.segment.column.ColumnCapabilities; import org.apache.druid.segment.column.ColumnCapabilitiesImpl; import org.apache.druid.segment.column.ColumnHolder; import org.apache.druid.segment.column.ComplexColumn; +import org.apache.druid.segment.column.DictionaryEncodedColumn; import org.apache.druid.segment.column.ValueType; import org.apache.druid.segment.data.IndexedInts; import org.apache.druid.segment.serde.ComplexMetricSerde; @@ -194,30 +195,38 @@ public class SegmentAnalyzer final ColumnHolder columnHolder ) { - long size = 0; - Comparable min = null; Comparable max = null; + long size = 0; + final int cardinality; + if (capabilities.hasBitmapIndexes()) { + final BitmapIndex bitmapIndex = columnHolder.getBitmapIndex(); + cardinality = bitmapIndex.getCardinality(); - if (!capabilities.hasBitmapIndexes()) { - return ColumnAnalysis.error("string_no_bitmap"); - } - - final BitmapIndex bitmapIndex = columnHolder.getBitmapIndex(); - final int cardinality = bitmapIndex.getCardinality(); - - if (analyzingSize()) { - for (int i = 0; i < cardinality; ++i) { - String value = bitmapIndex.getValue(i); - if (value != null) { - size += StringUtils.estimatedBinaryLengthAsUTF8(value) * bitmapIndex.getBitmap(bitmapIndex.getIndex(value)).size(); + if (analyzingSize()) { + for (int i = 0; i < cardinality; ++i) { + String value = bitmapIndex.getValue(i); + if (value != null) { + size += StringUtils.estimatedBinaryLengthAsUTF8(value) * bitmapIndex.getBitmap(bitmapIndex.getIndex(value)) + .size(); + } } } - } - if (analyzingMinMax() && cardinality > 0) { - min = NullHandling.nullToEmptyIfNeeded(bitmapIndex.getValue(0)); - max = NullHandling.nullToEmptyIfNeeded(bitmapIndex.getValue(cardinality - 1)); + if (analyzingMinMax() && cardinality > 0) { + min = NullHandling.nullToEmptyIfNeeded(bitmapIndex.getValue(0)); + max = NullHandling.nullToEmptyIfNeeded(bitmapIndex.getValue(cardinality - 1)); + } + } else if (capabilities.isDictionaryEncoded()) { + // fallback if no bitmap index + DictionaryEncodedColumn theColumn = (DictionaryEncodedColumn) columnHolder.getColumn(); + cardinality = theColumn.getCardinality(); + if (analyzingMinMax() && cardinality > 0) { + min = NullHandling.nullToEmptyIfNeeded(theColumn.lookupName(0)); + max = NullHandling.nullToEmptyIfNeeded(theColumn.lookupName(cardinality - 1)); + } + } else { + cardinality = 0; } return new ColumnAnalysis( diff --git a/processing/src/test/java/org/apache/druid/query/groupby/GroupByLimitPushDownInsufficientBufferTest.java b/processing/src/test/java/org/apache/druid/query/groupby/GroupByLimitPushDownInsufficientBufferTest.java index d6ac1bff1d0..5d8be6d36c2 
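On the SegmentAnalyzer hunk just above: analyzeStringColumn used to fail fast with ColumnAnalysis.error("string_no_bitmap") for any string column lacking bitmap indexes; it now falls back to the dictionary itself. A condensed sketch of the new decision flow, using names from the patch (the no-bitmap branch is exactly what the StringDimensionSchema(name, null, false) schemas added to TestIndex later in this patch exercise):

    final int cardinality;
    if (capabilities.hasBitmapIndexes()) {
      // size, min/max, and cardinality all derived from the bitmap index, as before
      cardinality = columnHolder.getBitmapIndex().getCardinality();
    } else if (capabilities.isDictionaryEncoded()) {
      // fallback: cardinality and min/max from the dictionary; no size estimate without bitmaps
      cardinality = ((DictionaryEncodedColumn) columnHolder.getColumn()).getCardinality();
    } else {
      cardinality = 0; // neither bitmap index nor dictionary: nothing to report
    }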
100644 --- a/processing/src/test/java/org/apache/druid/query/groupby/GroupByLimitPushDownInsufficientBufferTest.java +++ b/processing/src/test/java/org/apache/druid/query/groupby/GroupByLimitPushDownInsufficientBufferTest.java @@ -394,7 +394,7 @@ public class GroupByLimitPushDownInsufficientBufferTest strategySelector, new GroupByQueryQueryToolChest( strategySelector, - NoopIntervalChunkingQueryRunnerDecorator() + noopIntervalChunkingQueryRunnerDecorator() ) ); @@ -402,7 +402,7 @@ public class GroupByLimitPushDownInsufficientBufferTest tooSmallStrategySelector, new GroupByQueryQueryToolChest( tooSmallStrategySelector, - NoopIntervalChunkingQueryRunnerDecorator() + noopIntervalChunkingQueryRunnerDecorator() ) ); } @@ -679,7 +679,7 @@ public class GroupByLimitPushDownInsufficientBufferTest } }; - public static IntervalChunkingQueryRunnerDecorator NoopIntervalChunkingQueryRunnerDecorator() + public static IntervalChunkingQueryRunnerDecorator noopIntervalChunkingQueryRunnerDecorator() { return new IntervalChunkingQueryRunnerDecorator(null, null, null) { diff --git a/processing/src/test/java/org/apache/druid/query/groupby/GroupByLimitPushDownMultiNodeMergeTest.java b/processing/src/test/java/org/apache/druid/query/groupby/GroupByLimitPushDownMultiNodeMergeTest.java index a177398ea37..581e80b4e34 100644 --- a/processing/src/test/java/org/apache/druid/query/groupby/GroupByLimitPushDownMultiNodeMergeTest.java +++ b/processing/src/test/java/org/apache/druid/query/groupby/GroupByLimitPushDownMultiNodeMergeTest.java @@ -426,7 +426,7 @@ public class GroupByLimitPushDownMultiNodeMergeTest strategySelector, new GroupByQueryQueryToolChest( strategySelector, - NoopIntervalChunkingQueryRunnerDecorator() + noopIntervalChunkingQueryRunnerDecorator() ) ); @@ -434,7 +434,7 @@ public class GroupByLimitPushDownMultiNodeMergeTest strategySelector2, new GroupByQueryQueryToolChest( strategySelector2, - NoopIntervalChunkingQueryRunnerDecorator() + noopIntervalChunkingQueryRunnerDecorator() ) ); } @@ -780,7 +780,7 @@ public class GroupByLimitPushDownMultiNodeMergeTest } }; - public static IntervalChunkingQueryRunnerDecorator NoopIntervalChunkingQueryRunnerDecorator() + public static IntervalChunkingQueryRunnerDecorator noopIntervalChunkingQueryRunnerDecorator() { return new IntervalChunkingQueryRunnerDecorator(null, null, null) { diff --git a/processing/src/test/java/org/apache/druid/query/groupby/GroupByMultiSegmentTest.java b/processing/src/test/java/org/apache/druid/query/groupby/GroupByMultiSegmentTest.java index 27e41bacc33..62e7a40b392 100644 --- a/processing/src/test/java/org/apache/druid/query/groupby/GroupByMultiSegmentTest.java +++ b/processing/src/test/java/org/apache/druid/query/groupby/GroupByMultiSegmentTest.java @@ -287,7 +287,7 @@ public class GroupByMultiSegmentTest strategySelector, new GroupByQueryQueryToolChest( strategySelector, - NoopIntervalChunkingQueryRunnerDecorator() + noopIntervalChunkingQueryRunnerDecorator() ) ); } @@ -419,7 +419,7 @@ public class GroupByMultiSegmentTest } }; - public static IntervalChunkingQueryRunnerDecorator NoopIntervalChunkingQueryRunnerDecorator() + public static IntervalChunkingQueryRunnerDecorator noopIntervalChunkingQueryRunnerDecorator() { return new IntervalChunkingQueryRunnerDecorator(null, null, null) { @Override diff --git a/processing/src/test/java/org/apache/druid/query/lookup/LookupConfigTest.java b/processing/src/test/java/org/apache/druid/query/lookup/LookupConfigTest.java index ca18913ab7b..b8f295f1bf6 100644 --- 
a/processing/src/test/java/org/apache/druid/query/lookup/LookupConfigTest.java +++ b/processing/src/test/java/org/apache/druid/query/lookup/LookupConfigTest.java @@ -36,7 +36,7 @@ public class LookupConfigTest public TemporaryFolder temporaryFolder = new TemporaryFolder(); @Test - public void TestSerDesr() throws IOException + public void testSerDesr() throws IOException { LookupConfig lookupConfig = new LookupConfig(temporaryFolder.newFile().getAbsolutePath()); Assert.assertEquals( diff --git a/processing/src/test/java/org/apache/druid/query/metadata/SegmentMetadataQueryTest.java b/processing/src/test/java/org/apache/druid/query/metadata/SegmentMetadataQueryTest.java index 31e70d37664..5c613acf658 100644 --- a/processing/src/test/java/org/apache/druid/query/metadata/SegmentMetadataQueryTest.java +++ b/processing/src/test/java/org/apache/druid/query/metadata/SegmentMetadataQueryTest.java @@ -83,10 +83,16 @@ public class SegmentMetadataQueryTest public static QueryRunner makeMMappedQueryRunner( SegmentId segmentId, boolean rollup, + boolean bitmaps, QueryRunnerFactory factory ) { - QueryableIndex index = rollup ? TestIndex.getMMappedTestIndex() : TestIndex.getNoRollupMMappedTestIndex(); + QueryableIndex index; + if (bitmaps) { + index = rollup ? TestIndex.getMMappedTestIndex() : TestIndex.getNoRollupMMappedTestIndex(); + } else { + index = TestIndex.getNoBitmapMMappedTestIndex(); + } return QueryRunnerTestHelper.makeQueryRunner( factory, segmentId, @@ -99,10 +105,16 @@ public class SegmentMetadataQueryTest public static QueryRunner makeIncrementalIndexQueryRunner( SegmentId segmentId, boolean rollup, + boolean bitmaps, QueryRunnerFactory factory ) { - IncrementalIndex index = rollup ? TestIndex.getIncrementalTestIndex() : TestIndex.getNoRollupIncrementalTestIndex(); + IncrementalIndex index; + if (bitmaps) { + index = rollup ? TestIndex.getIncrementalTestIndex() : TestIndex.getNoRollupIncrementalTestIndex(); + } else { + index = TestIndex.getNoBitmapIncrementalTestIndex(); + } return QueryRunnerTestHelper.makeQueryRunner( factory, segmentId, @@ -121,17 +133,19 @@ public class SegmentMetadataQueryTest private final SegmentMetadataQuery testQuery; private final SegmentAnalysis expectedSegmentAnalysis1; private final SegmentAnalysis expectedSegmentAnalysis2; + private final boolean bitmaps; - @Parameterized.Parameters(name = "mmap1 = {0}, mmap2 = {1}, rollup1 = {2}, rollup2 = {3}, differentIds = {4}") + @Parameterized.Parameters(name = "mmap1 = {0}, mmap2 = {1}, rollup1 = {2}, rollup2 = {3}, differentIds = {4}, bitmaps={5}") public static Collection constructorFeeder() { return ImmutableList.of( - new Object[]{true, true, true, true, false}, - new Object[]{true, false, true, false, false}, - new Object[]{false, true, true, false, false}, - new Object[]{false, false, false, false, false}, - new Object[]{false, false, true, true, false}, - new Object[]{false, false, false, true, true} + new Object[]{true, true, true, true, false, true}, + new Object[]{true, false, true, false, false, true}, + new Object[]{false, true, true, false, false, true}, + new Object[]{false, false, false, false, false, true}, + new Object[]{false, false, true, true, false, true}, + new Object[]{false, false, false, true, true, true}, + new Object[]{true, true, false, false, false, false} ); } @@ -140,22 +154,24 @@ public class SegmentMetadataQueryTest boolean mmap2, boolean rollup1, boolean rollup2, - boolean differentIds + boolean differentIds, + boolean bitmaps ) { final SegmentId id1 = SegmentId.dummy(differentIds ? 
"testSegment1" : "testSegment"); final SegmentId id2 = SegmentId.dummy(differentIds ? "testSegment2" : "testSegment"); this.runner1 = mmap1 - ? makeMMappedQueryRunner(id1, rollup1, FACTORY) - : makeIncrementalIndexQueryRunner(id1, rollup1, FACTORY); + ? makeMMappedQueryRunner(id1, rollup1, bitmaps, FACTORY) + : makeIncrementalIndexQueryRunner(id1, rollup1, bitmaps, FACTORY); this.runner2 = mmap2 - ? makeMMappedQueryRunner(id2, rollup2, FACTORY) - : makeIncrementalIndexQueryRunner(id2, rollup2, FACTORY); + ? makeMMappedQueryRunner(id2, rollup2, bitmaps, FACTORY) + : makeIncrementalIndexQueryRunner(id2, rollup2, bitmaps, FACTORY); this.mmap1 = mmap1; this.mmap2 = mmap2; this.rollup1 = rollup1; this.rollup2 = rollup2; this.differentIds = differentIds; + this.bitmaps = bitmaps; testQuery = Druids.newSegmentMetadataQueryBuilder() .dataSource("testing") .intervals("2013/2014") @@ -169,6 +185,16 @@ public class SegmentMetadataQueryTest .merge(true) .build(); + int preferedSize1 = 0; + int placementSize2 = 0; + int overallSize1 = 119691; + int overallSize2 = 119691; + if (bitmaps) { + preferedSize1 = mmap1 ? 10881 : 10764; + placementSize2 = mmap2 ? 10881 : 0; + overallSize1 = mmap1 ? 167493 : 168188; + overallSize2 = mmap2 ? 167493 : 168188; + } expectedSegmentAnalysis1 = new SegmentAnalysis( id1.toString(), ImmutableList.of(Intervals.of("2011-01-12T00:00:00.000Z/2011-04-15T00:00:00.001Z")), @@ -187,7 +213,7 @@ public class SegmentMetadataQueryTest new ColumnAnalysis( ValueType.STRING.toString(), false, - mmap1 ? 10881 : 10764, + preferedSize1, 1, "preferred", "preferred", @@ -203,7 +229,7 @@ public class SegmentMetadataQueryTest null, null ) - ), mmap1 ? 167493 : 168188, + ), overallSize1, 1209, null, null, @@ -228,7 +254,7 @@ public class SegmentMetadataQueryTest new ColumnAnalysis( ValueType.STRING.toString(), false, - mmap2 ? 10881 : 0, + placementSize2, 1, null, null, @@ -245,7 +271,7 @@ public class SegmentMetadataQueryTest null ) // null_column will be included only for incremental index, which makes a little bigger result than expected - ), mmap2 ? 167493 : 168188, + ), overallSize2, 1209, null, null, @@ -470,10 +496,16 @@ public class SegmentMetadataQueryTest @Test public void testSegmentMetadataQueryWithDefaultAnalysisMerge() { + int size1 = 0; + int size2 = 0; + if (bitmaps) { + size1 = mmap1 ? 10881 : 10764; + size2 = mmap2 ? 10881 : 10764; + } ColumnAnalysis analysis = new ColumnAnalysis( ValueType.STRING.toString(), false, - (mmap1 ? 10881 : 10764) + (mmap2 ? 10881 : 10764), + size1 + size2, 1, "preferred", "preferred", @@ -485,10 +517,16 @@ public class SegmentMetadataQueryTest @Test public void testSegmentMetadataQueryWithDefaultAnalysisMerge2() { + int size1 = 0; + int size2 = 0; + if (bitmaps) { + size1 = mmap1 ? 6882 : 6808; + size2 = mmap2 ? 6882 : 6808; + } ColumnAnalysis analysis = new ColumnAnalysis( ValueType.STRING.toString(), false, - (mmap1 ? 6882 : 6808) + (mmap2 ? 6882 : 6808), + size1 + size2, 3, "spot", "upfront", @@ -500,10 +538,16 @@ public class SegmentMetadataQueryTest @Test public void testSegmentMetadataQueryWithDefaultAnalysisMerge3() { + int size1 = 0; + int size2 = 0; + if (bitmaps) { + size1 = mmap1 ? 9765 : 9660; + size2 = mmap2 ? 9765 : 9660; + } ColumnAnalysis analysis = new ColumnAnalysis( ValueType.STRING.toString(), false, - (mmap1 ? 9765 : 9660) + (mmap2 ? 
9765 : 9660), + size1 + size2, 9, "automotive", "travel", diff --git a/processing/src/test/java/org/apache/druid/segment/TestIndex.java b/processing/src/test/java/org/apache/druid/segment/TestIndex.java index 72b52873386..87d8abf1158 100644 --- a/processing/src/test/java/org/apache/druid/segment/TestIndex.java +++ b/processing/src/test/java/org/apache/druid/segment/TestIndex.java @@ -20,6 +20,7 @@ package org.apache.druid.segment; import com.google.common.base.Supplier; +import com.google.common.base.Suppliers; import com.google.common.base.Throwables; import com.google.common.io.CharSource; import com.google.common.io.LineProcessor; @@ -111,12 +112,31 @@ public class TestIndex new StringDimensionSchema("null_column") ); + public static final List DIMENSION_SCHEMAS_NO_BITMAP = Arrays.asList( + new StringDimensionSchema("market", null, false), + new StringDimensionSchema("quality", null, false), + new LongDimensionSchema("qualityLong"), + new FloatDimensionSchema("qualityFloat"), + new DoubleDimensionSchema("qualityDouble"), + new StringDimensionSchema("qualityNumericString", null, false), + new StringDimensionSchema("placement", null, false), + new StringDimensionSchema("placementish", null, false), + new StringDimensionSchema("partial_null_column", null, false), + new StringDimensionSchema("null_column", null, false) + ); + public static final DimensionsSpec DIMENSIONS_SPEC = new DimensionsSpec( DIMENSION_SCHEMAS, null, null ); + public static final DimensionsSpec DIMENSIONS_SPEC_NO_BITMAPS = new DimensionsSpec( + DIMENSION_SCHEMAS_NO_BITMAP, + null, + null + ); + public static final String[] DOUBLE_METRICS = new String[]{"index", "indexMin", "indexMaxPlusTen"}; public static final String[] FLOAT_METRICS = new String[]{"indexFloat", "indexMinFloat", "indexMaxFloat"}; private static final Logger log = new Logger(TestIndex.class); @@ -147,107 +167,95 @@ public class TestIndex } } - private static IncrementalIndex realtimeIndex = null; - private static IncrementalIndex noRollupRealtimeIndex = null; - private static QueryableIndex mmappedIndex = null; - private static QueryableIndex noRollupMmappedIndex = null; - private static QueryableIndex mergedRealtime = null; + private static Supplier realtimeIndex = Suppliers.memoize( + () -> makeRealtimeIndex("druid.sample.numeric.tsv") + ); + private static Supplier noRollupRealtimeIndex = Suppliers.memoize( + () -> makeRealtimeIndex("druid.sample.numeric.tsv", false) + ); + private static Supplier noBitmapRealtimeIndex = Suppliers.memoize( + () -> makeRealtimeIndex("druid.sample.numeric.tsv", false, false) + ); + private static Supplier mmappedIndex = Suppliers.memoize( + () -> persistRealtimeAndLoadMMapped(realtimeIndex.get()) + ); + private static Supplier noRollupMmappedIndex = Suppliers.memoize( + () -> persistRealtimeAndLoadMMapped(noRollupRealtimeIndex.get()) + ); + private static Supplier noBitmapMmappedIndex = Suppliers.memoize( + () -> persistRealtimeAndLoadMMapped(noBitmapRealtimeIndex.get()) + ); + private static Supplier mergedRealtime = Suppliers.memoize(() -> { + try { + IncrementalIndex top = makeRealtimeIndex("druid.sample.numeric.tsv.top"); + IncrementalIndex bottom = makeRealtimeIndex("druid.sample.numeric.tsv.bottom"); + + File tmpFile = File.createTempFile("yay", "who"); + tmpFile.delete(); + + File topFile = new File(tmpFile, "top"); + File bottomFile = new File(tmpFile, "bottom"); + File mergedFile = new File(tmpFile, "merged"); + + topFile.mkdirs(); + topFile.deleteOnExit(); + bottomFile.mkdirs(); + bottomFile.deleteOnExit(); 
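The TestIndex rewrite in progress here trades each hand-rolled "synchronized null-check" lazy singleton for a Guava Suppliers.memoize supplier, which gives the same thread-safe build-once semantics in a single declaration. The idiom, taken from this hunk:

    // Built at most once, on the first get(); later calls return the cached instance.
    private static final Supplier<QueryableIndex> mmappedIndex = Suppliers.memoize(
        () -> persistRealtimeAndLoadMMapped(realtimeIndex.get())
    );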
+ mergedFile.mkdirs(); + mergedFile.deleteOnExit(); + + INDEX_MERGER.persist(top, DATA_INTERVAL, topFile, indexSpec, null); + INDEX_MERGER.persist(bottom, DATA_INTERVAL, bottomFile, indexSpec, null); + + return INDEX_IO.loadIndex( + INDEX_MERGER.mergeQueryableIndex( + Arrays.asList(INDEX_IO.loadIndex(topFile), INDEX_IO.loadIndex(bottomFile)), + true, + METRIC_AGGS, + mergedFile, + indexSpec, + null + ) + ); + } + catch (IOException e) { + throw Throwables.propagate(e); + } + }); public static IncrementalIndex getIncrementalTestIndex() { - synchronized (log) { - if (realtimeIndex != null) { - return realtimeIndex; - } - } - - return realtimeIndex = makeRealtimeIndex("druid.sample.numeric.tsv"); + return realtimeIndex.get(); } public static IncrementalIndex getNoRollupIncrementalTestIndex() { - synchronized (log) { - if (noRollupRealtimeIndex != null) { - return noRollupRealtimeIndex; - } - } + return noRollupRealtimeIndex.get(); + } - return noRollupRealtimeIndex = makeRealtimeIndex("druid.sample.numeric.tsv", false); + public static IncrementalIndex getNoBitmapIncrementalTestIndex() + { + return noBitmapRealtimeIndex.get(); } public static QueryableIndex getMMappedTestIndex() { - synchronized (log) { - if (mmappedIndex != null) { - return mmappedIndex; - } - } - - IncrementalIndex incrementalIndex = getIncrementalTestIndex(); - mmappedIndex = persistRealtimeAndLoadMMapped(incrementalIndex); - - return mmappedIndex; + return mmappedIndex.get(); } public static QueryableIndex getNoRollupMMappedTestIndex() { - synchronized (log) { - if (noRollupMmappedIndex != null) { - return noRollupMmappedIndex; - } - } + return noRollupMmappedIndex.get(); + } - IncrementalIndex incrementalIndex = getNoRollupIncrementalTestIndex(); - noRollupMmappedIndex = persistRealtimeAndLoadMMapped(incrementalIndex); - - return noRollupMmappedIndex; + public static QueryableIndex getNoBitmapMMappedTestIndex() + { + return noBitmapMmappedIndex.get(); } public static QueryableIndex mergedRealtimeIndex() { - synchronized (log) { - if (mergedRealtime != null) { - return mergedRealtime; - } - - try { - IncrementalIndex top = makeRealtimeIndex("druid.sample.numeric.tsv.top"); - IncrementalIndex bottom = makeRealtimeIndex("druid.sample.numeric.tsv.bottom"); - - File tmpFile = File.createTempFile("yay", "who"); - tmpFile.delete(); - - File topFile = new File(tmpFile, "top"); - File bottomFile = new File(tmpFile, "bottom"); - File mergedFile = new File(tmpFile, "merged"); - - topFile.mkdirs(); - topFile.deleteOnExit(); - bottomFile.mkdirs(); - bottomFile.deleteOnExit(); - mergedFile.mkdirs(); - mergedFile.deleteOnExit(); - - INDEX_MERGER.persist(top, DATA_INTERVAL, topFile, indexSpec, null); - INDEX_MERGER.persist(bottom, DATA_INTERVAL, bottomFile, indexSpec, null); - - mergedRealtime = INDEX_IO.loadIndex( - INDEX_MERGER.mergeQueryableIndex( - Arrays.asList(INDEX_IO.loadIndex(topFile), INDEX_IO.loadIndex(bottomFile)), - true, - METRIC_AGGS, - mergedFile, - indexSpec, - null - ) - ); - - return mergedRealtime; - } - catch (IOException e) { - throw Throwables.propagate(e); - } - } + return mergedRealtime.get(); } public static IncrementalIndex makeRealtimeIndex(final String resourceFilename) @@ -256,6 +264,11 @@ public class TestIndex } public static IncrementalIndex makeRealtimeIndex(final String resourceFilename, boolean rollup) + { + return makeRealtimeIndex(resourceFilename, rollup, true); + } + + public static IncrementalIndex makeRealtimeIndex(final String resourceFilename, boolean rollup, boolean bitmap) { final URL 
resource = TestIndex.class.getClassLoader().getResource(resourceFilename); if (resource == null) { @@ -263,20 +276,20 @@ public class TestIndex } log.info("Realtime loading index file[%s]", resource); CharSource stream = Resources.asByteSource(resource).asCharSource(StandardCharsets.UTF_8); - return makeRealtimeIndex(stream, rollup); + return makeRealtimeIndex(stream, rollup, bitmap); } public static IncrementalIndex makeRealtimeIndex(final CharSource source) { - return makeRealtimeIndex(source, true); + return makeRealtimeIndex(source, true, true); } - public static IncrementalIndex makeRealtimeIndex(final CharSource source, boolean rollup) + public static IncrementalIndex makeRealtimeIndex(final CharSource source, boolean rollup, boolean bitmap) { final IncrementalIndexSchema schema = new IncrementalIndexSchema.Builder() .withMinTimestamp(DateTimes.of("2011-01-12T00:00:00.000Z").getMillis()) .withTimestampSpec(new TimestampSpec("ds", "auto", null)) - .withDimensionsSpec(DIMENSIONS_SPEC) + .withDimensionsSpec(bitmap ? DIMENSIONS_SPEC : DIMENSIONS_SPEC_NO_BITMAPS) .withVirtualColumns(VIRTUAL_COLUMNS) .withMetrics(METRIC_AGGS) .withRollup(rollup) diff --git a/processing/src/test/java/org/apache/druid/segment/filter/ExpressionFilterTest.java b/processing/src/test/java/org/apache/druid/segment/filter/ExpressionFilterTest.java index 3d3d50c971a..f9d4a91d19e 100644 --- a/processing/src/test/java/org/apache/druid/segment/filter/ExpressionFilterTest.java +++ b/processing/src/test/java/org/apache/druid/segment/filter/ExpressionFilterTest.java @@ -117,22 +117,22 @@ public class ExpressionFilterTest extends BaseFilterTest @Test public void testOneSingleValuedStringColumn() { - assertFilterMatches(EDF("dim3 == ''"), ImmutableList.of("0")); - assertFilterMatches(EDF("dim3 == '1'"), ImmutableList.of("3", "4", "6")); - assertFilterMatches(EDF("dim3 == 'a'"), ImmutableList.of("7")); - assertFilterMatches(EDF("dim3 == 1"), ImmutableList.of("3", "4", "6")); - assertFilterMatches(EDF("dim3 == 1.0"), ImmutableList.of("3", "4", "6")); - assertFilterMatches(EDF("dim3 == 1.234"), ImmutableList.of("9")); - assertFilterMatches(EDF("dim3 < '2'"), ImmutableList.of("0", "1", "3", "4", "6", "9")); + assertFilterMatches(edf("dim3 == ''"), ImmutableList.of("0")); + assertFilterMatches(edf("dim3 == '1'"), ImmutableList.of("3", "4", "6")); + assertFilterMatches(edf("dim3 == 'a'"), ImmutableList.of("7")); + assertFilterMatches(edf("dim3 == 1"), ImmutableList.of("3", "4", "6")); + assertFilterMatches(edf("dim3 == 1.0"), ImmutableList.of("3", "4", "6")); + assertFilterMatches(edf("dim3 == 1.234"), ImmutableList.of("9")); + assertFilterMatches(edf("dim3 < '2'"), ImmutableList.of("0", "1", "3", "4", "6", "9")); if (NullHandling.replaceWithDefault()) { - assertFilterMatches(EDF("dim3 < 2"), ImmutableList.of("0", "3", "4", "6", "7", "9")); - assertFilterMatches(EDF("dim3 < 2.0"), ImmutableList.of("0", "3", "4", "6", "7", "9")); + assertFilterMatches(edf("dim3 < 2"), ImmutableList.of("0", "3", "4", "6", "7", "9")); + assertFilterMatches(edf("dim3 < 2.0"), ImmutableList.of("0", "3", "4", "6", "7", "9")); } else { // Empty String and "a" will not match - assertFilterMatches(EDF("dim3 < 2"), ImmutableList.of("3", "4", "6", "9")); - assertFilterMatches(EDF("dim3 < 2.0"), ImmutableList.of("3", "4", "6", "9")); + assertFilterMatches(edf("dim3 < 2"), ImmutableList.of("3", "4", "6", "9")); + assertFilterMatches(edf("dim3 < 2.0"), ImmutableList.of("3", "4", "6", "9")); } - assertFilterMatches(EDF("like(dim3, '1%')"), 
ImmutableList.of("1", "3", "4", "6", "9")); + assertFilterMatches(edf("like(dim3, '1%')"), ImmutableList.of("1", "3", "4", "6", "9")); } @Test @@ -141,124 +141,124 @@ public class ExpressionFilterTest extends BaseFilterTest // Expressions currently treat multi-valued arrays as nulls. // This test is just documenting the current behavior, not necessarily saying it makes sense. if (NullHandling.replaceWithDefault()) { - assertFilterMatches(EDF("dim4 == ''"), ImmutableList.of("0", "1", "2", "4", "5", "6", "7", "8")); + assertFilterMatches(edf("dim4 == ''"), ImmutableList.of("0", "1", "2", "4", "5", "6", "7", "8")); } else { - assertFilterMatches(EDF("dim4 == ''"), ImmutableList.of("2")); + assertFilterMatches(edf("dim4 == ''"), ImmutableList.of("2")); // AS per SQL standard null == null returns false. - assertFilterMatches(EDF("dim4 == null"), ImmutableList.of()); + assertFilterMatches(edf("dim4 == null"), ImmutableList.of()); } - assertFilterMatches(EDF("dim4 == '1'"), ImmutableList.of()); - assertFilterMatches(EDF("dim4 == '3'"), ImmutableList.of("3")); + assertFilterMatches(edf("dim4 == '1'"), ImmutableList.of()); + assertFilterMatches(edf("dim4 == '3'"), ImmutableList.of("3")); } @Test public void testOneLongColumn() { if (NullHandling.replaceWithDefault()) { - assertFilterMatches(EDF("dim1 == ''"), ImmutableList.of("0")); + assertFilterMatches(edf("dim1 == ''"), ImmutableList.of("0")); } else { // A long does not match empty string - assertFilterMatches(EDF("dim1 == ''"), ImmutableList.of()); + assertFilterMatches(edf("dim1 == ''"), ImmutableList.of()); } - assertFilterMatches(EDF("dim1 == '1'"), ImmutableList.of("1")); - assertFilterMatches(EDF("dim1 == 2"), ImmutableList.of("2")); - assertFilterMatches(EDF("dim1 < '2'"), ImmutableList.of("0", "1")); - assertFilterMatches(EDF("dim1 < 2"), ImmutableList.of("0", "1")); - assertFilterMatches(EDF("dim1 < 2.0"), ImmutableList.of("0", "1")); - assertFilterMatches(EDF("like(dim1, '1%')"), ImmutableList.of("1")); + assertFilterMatches(edf("dim1 == '1'"), ImmutableList.of("1")); + assertFilterMatches(edf("dim1 == 2"), ImmutableList.of("2")); + assertFilterMatches(edf("dim1 < '2'"), ImmutableList.of("0", "1")); + assertFilterMatches(edf("dim1 < 2"), ImmutableList.of("0", "1")); + assertFilterMatches(edf("dim1 < 2.0"), ImmutableList.of("0", "1")); + assertFilterMatches(edf("like(dim1, '1%')"), ImmutableList.of("1")); } @Test public void testOneFloatColumn() { if (NullHandling.replaceWithDefault()) { - assertFilterMatches(EDF("dim2 == ''"), ImmutableList.of("0")); + assertFilterMatches(edf("dim2 == ''"), ImmutableList.of("0")); } else { // A float does not match empty string - assertFilterMatches(EDF("dim2 == ''"), ImmutableList.of()); + assertFilterMatches(edf("dim2 == ''"), ImmutableList.of()); } - assertFilterMatches(EDF("dim2 == '1'"), ImmutableList.of("1")); - assertFilterMatches(EDF("dim2 == 2"), ImmutableList.of("2")); - assertFilterMatches(EDF("dim2 < '2'"), ImmutableList.of("0", "1")); - assertFilterMatches(EDF("dim2 < 2"), ImmutableList.of("0", "1")); - assertFilterMatches(EDF("dim2 < 2.0"), ImmutableList.of("0", "1")); - assertFilterMatches(EDF("like(dim2, '1%')"), ImmutableList.of("1")); + assertFilterMatches(edf("dim2 == '1'"), ImmutableList.of("1")); + assertFilterMatches(edf("dim2 == 2"), ImmutableList.of("2")); + assertFilterMatches(edf("dim2 < '2'"), ImmutableList.of("0", "1")); + assertFilterMatches(edf("dim2 < 2"), ImmutableList.of("0", "1")); + assertFilterMatches(edf("dim2 < 2.0"), ImmutableList.of("0", "1")); + 
assertFilterMatches(edf("like(dim2, '1%')"), ImmutableList.of("1")); } @Test public void testConstantExpression() { - assertFilterMatches(EDF("1 + 1"), ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7", "8", "9")); - assertFilterMatches(EDF("0 + 0"), ImmutableList.of()); + assertFilterMatches(edf("1 + 1"), ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7", "8", "9")); + assertFilterMatches(edf("0 + 0"), ImmutableList.of()); } @Test public void testCompareColumns() { // String vs string - assertFilterMatches(EDF("dim0 == dim3"), ImmutableList.of("2", "5", "8")); + assertFilterMatches(edf("dim0 == dim3"), ImmutableList.of("2", "5", "8")); if (NullHandling.replaceWithDefault()) { // String vs long - assertFilterMatches(EDF("dim1 == dim3"), ImmutableList.of("0", "2", "5", "8")); + assertFilterMatches(edf("dim1 == dim3"), ImmutableList.of("0", "2", "5", "8")); // String vs float - assertFilterMatches(EDF("dim2 == dim3"), ImmutableList.of("0", "2", "5", "8")); + assertFilterMatches(edf("dim2 == dim3"), ImmutableList.of("0", "2", "5", "8")); } else { // String vs long - assertFilterMatches(EDF("dim1 == dim3"), ImmutableList.of("2", "5", "8")); + assertFilterMatches(edf("dim1 == dim3"), ImmutableList.of("2", "5", "8")); // String vs float - assertFilterMatches(EDF("dim2 == dim3"), ImmutableList.of("2", "5", "8")); + assertFilterMatches(edf("dim2 == dim3"), ImmutableList.of("2", "5", "8")); } // String vs. multi-value string // Expressions currently treat multi-valued arrays as nulls. // This test is just documenting the current behavior, not necessarily saying it makes sense. - assertFilterMatches(EDF("dim0 == dim4"), ImmutableList.of("3")); + assertFilterMatches(edf("dim0 == dim4"), ImmutableList.of("3")); } @Test public void testMissingColumn() { if (NullHandling.replaceWithDefault()) { - assertFilterMatches(EDF("missing == ''"), ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7", "8", "9")); + assertFilterMatches(edf("missing == ''"), ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7", "8", "9")); } else { // AS per SQL standard null == null returns false. 
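The edf() renaming in this file is part of a mechanical sweep across the patch's tests (EDF to edf here; IR, DS, TC, and SI to ir, ds, tc, and si elsewhere): private helper methods move to lower camelCase, presumably to satisfy the Checkstyle module added at the top of this patch (its body is elided in this diff, so the exact rule is not visible). Behavior is unchanged; only the helper's name moves:

    // was: private static ExpressionDimFilter EDF(final String expression)
    private static ExpressionDimFilter edf(final String expression)
    {
      return new ExpressionDimFilter(expression, TestExprMacroTable.INSTANCE);
    }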
- assertFilterMatches(EDF("missing == null"), ImmutableList.of()); + assertFilterMatches(edf("missing == null"), ImmutableList.of()); } - assertFilterMatches(EDF("missing == '1'"), ImmutableList.of()); - assertFilterMatches(EDF("missing == 2"), ImmutableList.of()); + assertFilterMatches(edf("missing == '1'"), ImmutableList.of()); + assertFilterMatches(edf("missing == 2"), ImmutableList.of()); if (NullHandling.replaceWithDefault()) { // missing equivaluent to 0 - assertFilterMatches(EDF("missing < '2'"), ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7", "8", "9")); - assertFilterMatches(EDF("missing < 2"), ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7", "8", "9")); - assertFilterMatches(EDF("missing < 2.0"), ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7", "8", "9")); + assertFilterMatches(edf("missing < '2'"), ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7", "8", "9")); + assertFilterMatches(edf("missing < 2"), ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7", "8", "9")); + assertFilterMatches(edf("missing < 2.0"), ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7", "8", "9")); } else { // missing equivalent to null - assertFilterMatches(EDF("missing < '2'"), ImmutableList.of()); - assertFilterMatches(EDF("missing < 2"), ImmutableList.of()); - assertFilterMatches(EDF("missing < 2.0"), ImmutableList.of()); + assertFilterMatches(edf("missing < '2'"), ImmutableList.of()); + assertFilterMatches(edf("missing < 2"), ImmutableList.of()); + assertFilterMatches(edf("missing < 2.0"), ImmutableList.of()); } - assertFilterMatches(EDF("missing > '2'"), ImmutableList.of()); - assertFilterMatches(EDF("missing > 2"), ImmutableList.of()); - assertFilterMatches(EDF("missing > 2.0"), ImmutableList.of()); - assertFilterMatches(EDF("like(missing, '1%')"), ImmutableList.of()); + assertFilterMatches(edf("missing > '2'"), ImmutableList.of()); + assertFilterMatches(edf("missing > 2"), ImmutableList.of()); + assertFilterMatches(edf("missing > 2.0"), ImmutableList.of()); + assertFilterMatches(edf("like(missing, '1%')"), ImmutableList.of()); } @Test public void testGetRequiredColumn() { - Assert.assertEquals(EDF("like(dim1, '1%')").getRequiredColumns(), Sets.newHashSet("dim1")); - Assert.assertEquals(EDF("dim2 == '1'").getRequiredColumns(), Sets.newHashSet("dim2")); - Assert.assertEquals(EDF("dim3 < '2'").getRequiredColumns(), Sets.newHashSet("dim3")); - Assert.assertEquals(EDF("dim4 == ''").getRequiredColumns(), Sets.newHashSet("dim4")); - Assert.assertEquals(EDF("1 + 1").getRequiredColumns(), new HashSet<>()); - Assert.assertEquals(EDF("dim0 == dim3").getRequiredColumns(), Sets.newHashSet("dim0", "dim3")); - Assert.assertEquals(EDF("missing == ''").getRequiredColumns(), Sets.newHashSet("missing")); + Assert.assertEquals(edf("like(dim1, '1%')").getRequiredColumns(), Sets.newHashSet("dim1")); + Assert.assertEquals(edf("dim2 == '1'").getRequiredColumns(), Sets.newHashSet("dim2")); + Assert.assertEquals(edf("dim3 < '2'").getRequiredColumns(), Sets.newHashSet("dim3")); + Assert.assertEquals(edf("dim4 == ''").getRequiredColumns(), Sets.newHashSet("dim4")); + Assert.assertEquals(edf("1 + 1").getRequiredColumns(), new HashSet<>()); + Assert.assertEquals(edf("dim0 == dim3").getRequiredColumns(), Sets.newHashSet("dim0", "dim3")); + Assert.assertEquals(edf("missing == ''").getRequiredColumns(), Sets.newHashSet("missing")); } - private static ExpressionDimFilter EDF(final String expression) + private static ExpressionDimFilter edf(final String expression) { return new 
ExpressionDimFilter(expression, TestExprMacroTable.INSTANCE); } diff --git a/server/src/main/java/org/apache/druid/client/coordinator/CoordinatorClient.java b/server/src/main/java/org/apache/druid/client/coordinator/CoordinatorClient.java index 2a197cfa0d8..41885405938 100644 --- a/server/src/main/java/org/apache/druid/client/coordinator/CoordinatorClient.java +++ b/server/src/main/java/org/apache/druid/client/coordinator/CoordinatorClient.java @@ -28,11 +28,13 @@ import org.apache.druid.java.util.common.ISE; import org.apache.druid.java.util.common.StringUtils; import org.apache.druid.java.util.http.client.response.FullResponseHolder; import org.apache.druid.query.SegmentDescriptor; +import org.apache.druid.timeline.DataSegment; import org.jboss.netty.handler.codec.http.HttpMethod; import org.jboss.netty.handler.codec.http.HttpResponseStatus; import org.joda.time.Interval; import javax.annotation.Nullable; +import javax.ws.rs.core.MediaType; import java.util.List; public class CoordinatorClient @@ -95,13 +97,15 @@ public class CoordinatorClient { try { FullResponseHolder response = druidLeaderClient.go( - druidLeaderClient.makeRequest(HttpMethod.GET, - StringUtils.format( - "/druid/coordinator/v1/datasources/%s/intervals/%s/serverview?partial=%s", - dataSource, - interval.toString().replace('/', '_'), - incompleteOk - )) + druidLeaderClient.makeRequest( + HttpMethod.GET, + StringUtils.format( + "/druid/coordinator/v1/datasources/%s/intervals/%s/serverview?partial=%s", + StringUtils.urlEncode(dataSource), + interval.toString().replace('/', '_'), + incompleteOk + ) + ) ); if (!response.getStatus().equals(HttpResponseStatus.OK)) { @@ -121,4 +125,35 @@ public class CoordinatorClient throw new RuntimeException(e); } } + + public List getDatabaseSegmentDataSourceSegments(String dataSource, List intervals) + { + try { + FullResponseHolder response = druidLeaderClient.go( + druidLeaderClient.makeRequest( + HttpMethod.POST, + StringUtils.format( + "/druid/coordinator/v1/metadata/datasources/%s/segments?full", + StringUtils.urlEncode(dataSource) + ) + ).setContent(MediaType.APPLICATION_JSON, jsonMapper.writeValueAsBytes(intervals)) + ); + + if (!response.getStatus().equals(HttpResponseStatus.OK)) { + throw new ISE( + "Error while fetching database segment data source segments status[%s] content[%s]", + response.getStatus(), + response.getContent() + ); + } + return jsonMapper.readValue( + response.getContent(), new TypeReference>() + { + } + ); + } + catch (Exception e) { + throw new RuntimeException(e); + } + } } diff --git a/server/src/main/java/org/apache/druid/segment/loading/SegmentLoaderLocalCacheManager.java b/server/src/main/java/org/apache/druid/segment/loading/SegmentLoaderLocalCacheManager.java index 68e8a20b62b..92987354a74 100644 --- a/server/src/main/java/org/apache/druid/segment/loading/SegmentLoaderLocalCacheManager.java +++ b/server/src/main/java/org/apache/druid/segment/loading/SegmentLoaderLocalCacheManager.java @@ -58,6 +58,9 @@ public class SegmentLoaderLocalCacheManager implements SegmentLoader } }; + // Note that we only create this via injection in historical and realtime nodes. Peons create these + // objects via SegmentLoaderFactory objects, so that they can store segments in task-specific + // directories rather than statically configured directories. 
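The comment added above captures the design point behind this patch's SegmentLoaderFactory changes: historicals and realtime nodes receive this manager as a Guice singleton, while peons build one per task so segment caches land in task-specific directories (which is also why the withConfig() re-configuration hook is deleted just below). A sketch of the peon-side path under stated assumptions; configFor() is a hypothetical helper standing in for however the factory derives a task-scoped SegmentLoaderConfig:

    // Hypothetical sketch: produce a loader whose cache lives under the task's own directory.
    public SegmentLoader manufacturate(File taskStorageDir)
    {
      return new SegmentLoaderLocalCacheManager(
          indexIO,
          configFor(taskStorageDir), // assumed helper returning a SegmentLoaderConfig
          jsonMapper
      );
    }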
@Inject public SegmentLoaderLocalCacheManager( IndexIO indexIO, @@ -79,11 +82,6 @@ public class SegmentLoaderLocalCacheManager implements SegmentLoader } } - public SegmentLoaderLocalCacheManager withConfig(SegmentLoaderConfig config) - { - return new SegmentLoaderLocalCacheManager(indexIO, config, jsonMapper); - } - @Override public boolean isSegmentLoaded(final DataSegment segment) { diff --git a/server/src/test/java/org/apache/druid/segment/realtime/appenderator/AppenderatorPlumberTest.java b/server/src/test/java/org/apache/druid/segment/realtime/appenderator/AppenderatorPlumberTest.java index b860b002128..15a650b38ce 100644 --- a/server/src/test/java/org/apache/druid/segment/realtime/appenderator/AppenderatorPlumberTest.java +++ b/server/src/test/java/org/apache/druid/segment/realtime/appenderator/AppenderatorPlumberTest.java @@ -100,8 +100,8 @@ public class AppenderatorPlumberTest // getDataSource Assert.assertEquals(AppenderatorTester.DATASOURCE, appenderator.getDataSource()); - InputRow[] rows = new InputRow[] {AppenderatorTest.IR("2000", "foo", 1), - AppenderatorTest.IR("2000", "bar", 2), AppenderatorTest.IR("2000", "qux", 4)}; + InputRow[] rows = new InputRow[] {AppenderatorTest.ir("2000", "foo", 1), + AppenderatorTest.ir("2000", "bar", 2), AppenderatorTest.ir("2000", "qux", 4)}; // add Assert.assertEquals(1, plumber.add(rows[0], null).getRowCount()); diff --git a/server/src/test/java/org/apache/druid/segment/realtime/appenderator/AppenderatorTest.java b/server/src/test/java/org/apache/druid/segment/realtime/appenderator/AppenderatorTest.java index e0f67dc0a0e..334214d2174 100644 --- a/server/src/test/java/org/apache/druid/segment/realtime/appenderator/AppenderatorTest.java +++ b/server/src/test/java/org/apache/druid/segment/realtime/appenderator/AppenderatorTest.java @@ -58,9 +58,9 @@ import java.util.concurrent.atomic.AtomicInteger; public class AppenderatorTest { private static final List IDENTIFIERS = ImmutableList.of( - SI("2000/2001", "A", 0), - SI("2000/2001", "A", 1), - SI("2001/2002", "A", 0) + si("2000/2001", "A", 0), + si("2000/2001", "A", 1), + si("2001/2002", "A", 0) ); @Test @@ -83,21 +83,21 @@ public class AppenderatorTest commitMetadata.put("x", "1"); Assert.assertEquals( 1, - appenderator.add(IDENTIFIERS.get(0), IR("2000", "foo", 1), committerSupplier) + appenderator.add(IDENTIFIERS.get(0), ir("2000", "foo", 1), committerSupplier) .getNumRowsInSegment() ); commitMetadata.put("x", "2"); Assert.assertEquals( 2, - appenderator.add(IDENTIFIERS.get(0), IR("2000", "bar", 2), committerSupplier) + appenderator.add(IDENTIFIERS.get(0), ir("2000", "bar", 2), committerSupplier) .getNumRowsInSegment() ); commitMetadata.put("x", "3"); Assert.assertEquals( 1, - appenderator.add(IDENTIFIERS.get(1), IR("2000", "qux", 4), committerSupplier) + appenderator.add(IDENTIFIERS.get(1), ir("2000", "qux", 4), committerSupplier) .getNumRowsInSegment() ); @@ -173,14 +173,14 @@ public class AppenderatorTest }; appenderator.startJob(); - appenderator.add(IDENTIFIERS.get(0), IR("2000", "foo", 1), committerSupplier); + appenderator.add(IDENTIFIERS.get(0), ir("2000", "foo", 1), committerSupplier); //expectedSizeInBytes = 44(map overhead) + 28 (TimeAndDims overhead) + 56 (aggregator metrics) + 10 (dimsKeySize) = 138 + 1 byte when null handling is enabled int nullHandlingOverhead = NullHandling.sqlCompatible() ? 
1 : 0; Assert.assertEquals( 138 + nullHandlingOverhead, ((AppenderatorImpl) appenderator).getBytesInMemory(IDENTIFIERS.get(0)) ); - appenderator.add(IDENTIFIERS.get(1), IR("2000", "bar", 1), committerSupplier); + appenderator.add(IDENTIFIERS.get(1), ir("2000", "bar", 1), committerSupplier); Assert.assertEquals( 138 + nullHandlingOverhead, ((AppenderatorImpl) appenderator).getBytesInMemory(IDENTIFIERS.get(1)) @@ -216,11 +216,11 @@ public class AppenderatorTest }; appenderator.startJob(); - appenderator.add(IDENTIFIERS.get(0), IR("2000", "foo", 1), committerSupplier); + appenderator.add(IDENTIFIERS.get(0), ir("2000", "foo", 1), committerSupplier); //expectedSizeInBytes = 44(map overhead) + 28 (TimeAndDims overhead) + 56 (aggregator metrics) + 10 (dimsKeySize) = 138 int nullHandlingOverhead = NullHandling.sqlCompatible() ? 1 : 0; Assert.assertEquals(138 + nullHandlingOverhead, ((AppenderatorImpl) appenderator).getBytesCurrentlyInMemory()); - appenderator.add(IDENTIFIERS.get(1), IR("2000", "bar", 1), committerSupplier); + appenderator.add(IDENTIFIERS.get(1), ir("2000", "bar", 1), committerSupplier); Assert.assertEquals( 276 + 2 * nullHandlingOverhead, ((AppenderatorImpl) appenderator).getBytesCurrentlyInMemory() @@ -258,7 +258,7 @@ public class AppenderatorTest Assert.assertEquals(0, ((AppenderatorImpl) appenderator).getRowsInMemory()); appenderator.startJob(); Assert.assertEquals(0, ((AppenderatorImpl) appenderator).getRowsInMemory()); - appenderator.add(IDENTIFIERS.get(0), IR("2000", "foo", 1), committerSupplier); + appenderator.add(IDENTIFIERS.get(0), ir("2000", "foo", 1), committerSupplier); //we still calculate the size even when ignoring it to make persist decision int nullHandlingOverhead = NullHandling.sqlCompatible() ? 1 : 0; Assert.assertEquals( @@ -266,7 +266,7 @@ public class AppenderatorTest ((AppenderatorImpl) appenderator).getBytesInMemory(IDENTIFIERS.get(0)) ); Assert.assertEquals(1, ((AppenderatorImpl) appenderator).getRowsInMemory()); - appenderator.add(IDENTIFIERS.get(1), IR("2000", "bar", 1), committerSupplier); + appenderator.add(IDENTIFIERS.get(1), ir("2000", "bar", 1), committerSupplier); Assert.assertEquals( 276 + 2 * nullHandlingOverhead, ((AppenderatorImpl) appenderator).getBytesCurrentlyInMemory() @@ -310,17 +310,17 @@ public class AppenderatorTest Assert.assertEquals(0, ((AppenderatorImpl) appenderator).getRowsInMemory()); appenderator.startJob(); Assert.assertEquals(0, ((AppenderatorImpl) appenderator).getRowsInMemory()); - appenderator.add(IDENTIFIERS.get(0), IR("2000", "foo", 1), committerSupplier); + appenderator.add(IDENTIFIERS.get(0), ir("2000", "foo", 1), committerSupplier); Assert.assertEquals(1, ((AppenderatorImpl) appenderator).getRowsInMemory()); - appenderator.add(IDENTIFIERS.get(1), IR("2000", "bar", 1), committerSupplier); + appenderator.add(IDENTIFIERS.get(1), ir("2000", "bar", 1), committerSupplier); Assert.assertEquals(2, ((AppenderatorImpl) appenderator).getRowsInMemory()); - appenderator.add(IDENTIFIERS.get(1), IR("2000", "bar", 1), committerSupplier); + appenderator.add(IDENTIFIERS.get(1), ir("2000", "bar", 1), committerSupplier); Assert.assertEquals(2, ((AppenderatorImpl) appenderator).getRowsInMemory()); - appenderator.add(IDENTIFIERS.get(0), IR("2000", "baz", 1), committerSupplier); + appenderator.add(IDENTIFIERS.get(0), ir("2000", "baz", 1), committerSupplier); Assert.assertEquals(0, ((AppenderatorImpl) appenderator).getRowsInMemory()); - appenderator.add(IDENTIFIERS.get(1), IR("2000", "qux", 1), committerSupplier); + 
appenderator.add(IDENTIFIERS.get(1), ir("2000", "qux", 1), committerSupplier); Assert.assertEquals(1, ((AppenderatorImpl) appenderator).getRowsInMemory()); - appenderator.add(IDENTIFIERS.get(0), IR("2000", "bob", 1), committerSupplier); + appenderator.add(IDENTIFIERS.get(0), ir("2000", "bob", 1), committerSupplier); Assert.assertEquals(2, ((AppenderatorImpl) appenderator).getRowsInMemory()); appenderator.persistAll(committerSupplier.get()); Assert.assertEquals(0, ((AppenderatorImpl) appenderator).getRowsInMemory()); @@ -356,17 +356,17 @@ public class AppenderatorTest Assert.assertEquals(0, ((AppenderatorImpl) appenderator).getRowsInMemory()); appenderator.startJob(); Assert.assertEquals(0, ((AppenderatorImpl) appenderator).getRowsInMemory()); - appenderator.add(IDENTIFIERS.get(0), IR("2000", "foo", 1), committerSupplier, false); + appenderator.add(IDENTIFIERS.get(0), ir("2000", "foo", 1), committerSupplier, false); Assert.assertEquals(1, ((AppenderatorImpl) appenderator).getRowsInMemory()); - appenderator.add(IDENTIFIERS.get(1), IR("2000", "bar", 1), committerSupplier, false); + appenderator.add(IDENTIFIERS.get(1), ir("2000", "bar", 1), committerSupplier, false); Assert.assertEquals(2, ((AppenderatorImpl) appenderator).getRowsInMemory()); - appenderator.add(IDENTIFIERS.get(1), IR("2000", "bar", 1), committerSupplier, false); + appenderator.add(IDENTIFIERS.get(1), ir("2000", "bar", 1), committerSupplier, false); Assert.assertEquals(2, ((AppenderatorImpl) appenderator).getRowsInMemory()); - appenderator.add(IDENTIFIERS.get(0), IR("2000", "baz", 1), committerSupplier, false); + appenderator.add(IDENTIFIERS.get(0), ir("2000", "baz", 1), committerSupplier, false); Assert.assertEquals(3, ((AppenderatorImpl) appenderator).getRowsInMemory()); - appenderator.add(IDENTIFIERS.get(1), IR("2000", "qux", 1), committerSupplier, false); + appenderator.add(IDENTIFIERS.get(1), ir("2000", "qux", 1), committerSupplier, false); Assert.assertEquals(4, ((AppenderatorImpl) appenderator).getRowsInMemory()); - appenderator.add(IDENTIFIERS.get(0), IR("2000", "bob", 1), committerSupplier, false); + appenderator.add(IDENTIFIERS.get(0), ir("2000", "bob", 1), committerSupplier, false); Assert.assertEquals(5, ((AppenderatorImpl) appenderator).getRowsInMemory()); appenderator.persistAll(committerSupplier.get()); Assert.assertEquals(0, ((AppenderatorImpl) appenderator).getRowsInMemory()); @@ -409,15 +409,15 @@ public class AppenderatorTest appenderator.startJob(); eventCount.incrementAndGet(); - appenderator.add(IDENTIFIERS.get(0), IR("2000", "foo", 1), committerSupplier); + appenderator.add(IDENTIFIERS.get(0), ir("2000", "foo", 1), committerSupplier); eventCount.incrementAndGet(); - appenderator.add(IDENTIFIERS.get(0), IR("2000", "bar", 2), committerSupplier); + appenderator.add(IDENTIFIERS.get(0), ir("2000", "bar", 2), committerSupplier); eventCount.incrementAndGet(); - appenderator.add(IDENTIFIERS.get(0), IR("2000", "baz", 3), committerSupplier); + appenderator.add(IDENTIFIERS.get(0), ir("2000", "baz", 3), committerSupplier); eventCount.incrementAndGet(); - appenderator.add(IDENTIFIERS.get(0), IR("2000", "qux", 4), committerSupplier); + appenderator.add(IDENTIFIERS.get(0), ir("2000", "qux", 4), committerSupplier); eventCount.incrementAndGet(); - appenderator.add(IDENTIFIERS.get(0), IR("2000", "bob", 5), committerSupplier); + appenderator.add(IDENTIFIERS.get(0), ir("2000", "bob", 5), committerSupplier); appenderator.close(); try (final AppenderatorTester tester2 = new AppenderatorTester( @@ -445,9 +445,9 @@ public class 
AppenderatorTest Assert.assertEquals(0, appenderator.getTotalRowCount()); appenderator.startJob(); Assert.assertEquals(0, appenderator.getTotalRowCount()); - appenderator.add(IDENTIFIERS.get(0), IR("2000", "foo", 1), committerSupplier); + appenderator.add(IDENTIFIERS.get(0), ir("2000", "foo", 1), committerSupplier); Assert.assertEquals(1, appenderator.getTotalRowCount()); - appenderator.add(IDENTIFIERS.get(1), IR("2000", "bar", 1), committerSupplier); + appenderator.add(IDENTIFIERS.get(1), ir("2000", "bar", 1), committerSupplier); Assert.assertEquals(2, appenderator.getTotalRowCount()); appenderator.persistAll(committerSupplier.get()).get(); @@ -457,13 +457,13 @@ public class AppenderatorTest appenderator.drop(IDENTIFIERS.get(1)).get(); Assert.assertEquals(0, appenderator.getTotalRowCount()); - appenderator.add(IDENTIFIERS.get(2), IR("2001", "bar", 1), committerSupplier); + appenderator.add(IDENTIFIERS.get(2), ir("2001", "bar", 1), committerSupplier); Assert.assertEquals(1, appenderator.getTotalRowCount()); - appenderator.add(IDENTIFIERS.get(2), IR("2001", "baz", 1), committerSupplier); + appenderator.add(IDENTIFIERS.get(2), ir("2001", "baz", 1), committerSupplier); Assert.assertEquals(2, appenderator.getTotalRowCount()); - appenderator.add(IDENTIFIERS.get(2), IR("2001", "qux", 1), committerSupplier); + appenderator.add(IDENTIFIERS.get(2), ir("2001", "qux", 1), committerSupplier); Assert.assertEquals(3, appenderator.getTotalRowCount()); - appenderator.add(IDENTIFIERS.get(2), IR("2001", "bob", 1), committerSupplier); + appenderator.add(IDENTIFIERS.get(2), ir("2001", "bob", 1), committerSupplier); Assert.assertEquals(4, appenderator.getTotalRowCount()); appenderator.persistAll(committerSupplier.get()).get(); @@ -483,13 +483,13 @@ public class AppenderatorTest final Appenderator appenderator = tester.getAppenderator(); appenderator.startJob(); - appenderator.add(IDENTIFIERS.get(0), IR("2000", "foo", 1), Suppliers.ofInstance(Committers.nil())); - appenderator.add(IDENTIFIERS.get(0), IR("2000", "foo", 2), Suppliers.ofInstance(Committers.nil())); - appenderator.add(IDENTIFIERS.get(1), IR("2000", "foo", 4), Suppliers.ofInstance(Committers.nil())); - appenderator.add(IDENTIFIERS.get(2), IR("2001", "foo", 8), Suppliers.ofInstance(Committers.nil())); - appenderator.add(IDENTIFIERS.get(2), IR("2001T01", "foo", 16), Suppliers.ofInstance(Committers.nil())); - appenderator.add(IDENTIFIERS.get(2), IR("2001T02", "foo", 32), Suppliers.ofInstance(Committers.nil())); - appenderator.add(IDENTIFIERS.get(2), IR("2001T03", "foo", 64), Suppliers.ofInstance(Committers.nil())); + appenderator.add(IDENTIFIERS.get(0), ir("2000", "foo", 1), Suppliers.ofInstance(Committers.nil())); + appenderator.add(IDENTIFIERS.get(0), ir("2000", "foo", 2), Suppliers.ofInstance(Committers.nil())); + appenderator.add(IDENTIFIERS.get(1), ir("2000", "foo", 4), Suppliers.ofInstance(Committers.nil())); + appenderator.add(IDENTIFIERS.get(2), ir("2001", "foo", 8), Suppliers.ofInstance(Committers.nil())); + appenderator.add(IDENTIFIERS.get(2), ir("2001T01", "foo", 16), Suppliers.ofInstance(Committers.nil())); + appenderator.add(IDENTIFIERS.get(2), ir("2001T02", "foo", 32), Suppliers.ofInstance(Committers.nil())); + appenderator.add(IDENTIFIERS.get(2), ir("2001T03", "foo", 64), Suppliers.ofInstance(Committers.nil())); // Query1: 2000/2001 final TimeseriesQuery query1 = Druids.newTimeseriesQueryBuilder() @@ -619,13 +619,13 @@ public class AppenderatorTest final Appenderator appenderator = tester.getAppenderator(); appenderator.startJob(); - 
appenderator.add(IDENTIFIERS.get(0), IR("2000", "foo", 1), Suppliers.ofInstance(Committers.nil())); - appenderator.add(IDENTIFIERS.get(0), IR("2000", "foo", 2), Suppliers.ofInstance(Committers.nil())); - appenderator.add(IDENTIFIERS.get(1), IR("2000", "foo", 4), Suppliers.ofInstance(Committers.nil())); - appenderator.add(IDENTIFIERS.get(2), IR("2001", "foo", 8), Suppliers.ofInstance(Committers.nil())); - appenderator.add(IDENTIFIERS.get(2), IR("2001T01", "foo", 16), Suppliers.ofInstance(Committers.nil())); - appenderator.add(IDENTIFIERS.get(2), IR("2001T02", "foo", 32), Suppliers.ofInstance(Committers.nil())); - appenderator.add(IDENTIFIERS.get(2), IR("2001T03", "foo", 64), Suppliers.ofInstance(Committers.nil())); + appenderator.add(IDENTIFIERS.get(0), ir("2000", "foo", 1), Suppliers.ofInstance(Committers.nil())); + appenderator.add(IDENTIFIERS.get(0), ir("2000", "foo", 2), Suppliers.ofInstance(Committers.nil())); + appenderator.add(IDENTIFIERS.get(1), ir("2000", "foo", 4), Suppliers.ofInstance(Committers.nil())); + appenderator.add(IDENTIFIERS.get(2), ir("2001", "foo", 8), Suppliers.ofInstance(Committers.nil())); + appenderator.add(IDENTIFIERS.get(2), ir("2001T01", "foo", 16), Suppliers.ofInstance(Committers.nil())); + appenderator.add(IDENTIFIERS.get(2), ir("2001T02", "foo", 32), Suppliers.ofInstance(Committers.nil())); + appenderator.add(IDENTIFIERS.get(2), ir("2001T03", "foo", 64), Suppliers.ofInstance(Committers.nil())); // Query1: segment #2 final TimeseriesQuery query1 = Druids.newTimeseriesQueryBuilder() @@ -742,7 +742,7 @@ public class AppenderatorTest } } - private static SegmentIdWithShardSpec SI(String interval, String version, int partitionNum) + private static SegmentIdWithShardSpec si(String interval, String version, int partitionNum) { return new SegmentIdWithShardSpec( AppenderatorTester.DATASOURCE, @@ -752,7 +752,7 @@ public class AppenderatorTest ); } - static InputRow IR(String ts, String dim, long met) + static InputRow ir(String ts, String dim, long met) { return new MapBasedInputRow( DateTimes.of(ts).getMillis(), diff --git a/server/src/test/java/org/apache/druid/segment/realtime/appenderator/DefaultOfflineAppenderatorFactoryTest.java b/server/src/test/java/org/apache/druid/segment/realtime/appenderator/DefaultOfflineAppenderatorFactoryTest.java index fbf85b44600..e02ca6d657b 100644 --- a/server/src/test/java/org/apache/druid/segment/realtime/appenderator/DefaultOfflineAppenderatorFactoryTest.java +++ b/server/src/test/java/org/apache/druid/segment/realtime/appenderator/DefaultOfflineAppenderatorFactoryTest.java @@ -167,9 +167,9 @@ public class DefaultOfflineAppenderatorFactoryTest new LinearShardSpec(0) ); Assert.assertEquals(0, ((AppenderatorImpl) appenderator).getRowsInMemory()); - appenderator.add(identifier, AppenderatorTest.IR("2000", "bar", 1), Suppliers.ofInstance(Committers.nil())); + appenderator.add(identifier, AppenderatorTest.ir("2000", "bar", 1), Suppliers.ofInstance(Committers.nil())); Assert.assertEquals(1, ((AppenderatorImpl) appenderator).getRowsInMemory()); - appenderator.add(identifier, AppenderatorTest.IR("2000", "baz", 1), Suppliers.ofInstance(Committers.nil())); + appenderator.add(identifier, AppenderatorTest.ir("2000", "baz", 1), Suppliers.ofInstance(Committers.nil())); Assert.assertEquals(2, ((AppenderatorImpl) appenderator).getRowsInMemory()); appenderator.close(); Assert.assertEquals(0, ((AppenderatorImpl) appenderator).getRowsInMemory()); diff --git a/services/src/main/java/org/apache/druid/cli/CliPeon.java 
b/services/src/main/java/org/apache/druid/cli/CliPeon.java index 71dc3050c28..e4334f4cb8e 100644 --- a/services/src/main/java/org/apache/druid/cli/CliPeon.java +++ b/services/src/main/java/org/apache/druid/cli/CliPeon.java @@ -91,7 +91,6 @@ import org.apache.druid.segment.loading.DataSegmentMover; import org.apache.druid.segment.loading.OmniDataSegmentArchiver; import org.apache.druid.segment.loading.OmniDataSegmentKiller; import org.apache.druid.segment.loading.OmniDataSegmentMover; -import org.apache.druid.segment.loading.SegmentLoaderConfig; import org.apache.druid.segment.realtime.firehose.ChatHandlerProvider; import org.apache.druid.segment.realtime.firehose.NoopChatHandlerProvider; import org.apache.druid.segment.realtime.firehose.ServiceAnnouncingChatHandlerProvider; @@ -109,7 +108,6 @@ import org.eclipse.jetty.server.Server; import javax.annotation.Nullable; import java.io.File; import java.io.IOException; -import java.util.Collections; import java.util.List; import java.util.Properties; import java.util.Set; @@ -255,12 +253,6 @@ public class CliPeon extends GuiceRunnable .to(CoordinatorBasedSegmentHandoffNotifierFactory.class) .in(LazySingleton.class); - // Override the default SegmentLoaderConfig because we don't actually care about the - // configuration based locations. This will override them anyway. This is also stopping - // configuration of other parameters, but I don't think that's actually a problem. - // Note, if that is actually not a problem, then that probably means we have the wrong abstraction. - binder.bind(SegmentLoaderConfig.class) - .toInstance(new SegmentLoaderConfig().withLocations(Collections.emptyList())); binder.bind(CoordinatorClient.class).in(LazySingleton.class); binder.bind(JettyServerInitializer.class).to(QueryJettyServerInitializer.class); diff --git a/services/src/main/java/org/apache/druid/cli/RouterJettyServerInitializer.java b/services/src/main/java/org/apache/druid/cli/RouterJettyServerInitializer.java index 57b6cbbd2e7..d76fe83e448 100644 --- a/services/src/main/java/org/apache/druid/cli/RouterJettyServerInitializer.java +++ b/services/src/main/java/org/apache/druid/cli/RouterJettyServerInitializer.java @@ -66,6 +66,7 @@ public class RouterJettyServerInitializer implements JettyServerInitializer "/", "/coordinator-console/*", "/public/*", + "/assets/*", "/old-console/*", "/pages/*", "/unified-console.html", diff --git a/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java b/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java index 4f25a66c113..425632d5ded 100644 --- a/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java +++ b/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java @@ -362,7 +362,7 @@ public class DruidAvaticaHandlerTest extends CalciteTestBase final DatabaseMetaData metaData = client.getMetaData(); Assert.assertEquals( ImmutableList.of( - ROW(Pair.of("TABLE_CAT", "druid")) + row(Pair.of("TABLE_CAT", "druid")) ), getRows(metaData.getCatalogs()) ); @@ -374,7 +374,7 @@ public class DruidAvaticaHandlerTest extends CalciteTestBase final DatabaseMetaData metaData = client.getMetaData(); Assert.assertEquals( ImmutableList.of( - ROW(Pair.of("TABLE_CATALOG", "druid"), Pair.of("TABLE_SCHEM", "druid")) + row(Pair.of("TABLE_CATALOG", "druid"), Pair.of("TABLE_SCHEM", "druid")) ), getRows(metaData.getSchemas(null, "druid")) ); @@ -386,19 +386,19 @@ public class DruidAvaticaHandlerTest extends CalciteTestBase final DatabaseMetaData metaData = 
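The CliPeon hunk above deletes the hard-coded override that pinned SegmentLoaderConfig to an empty locations list, so the peon now receives that config through the ordinary module bindings. As a rough sketch of the property-backed style those modules use (the druid.segmentCache prefix is an assumption here, not something this patch shows):

    // Hypothetical sketch: SegmentLoaderConfig populated from runtime.properties
    // rather than bound to a fixed empty-locations instance.
    JsonConfigProvider.bind(binder, "druid.segmentCache", SegmentLoaderConfig.class);
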
client.getMetaData(); Assert.assertEquals( ImmutableList.of( - ROW( + row( Pair.of("TABLE_CAT", "druid"), Pair.of("TABLE_NAME", CalciteTests.DATASOURCE1), Pair.of("TABLE_SCHEM", "druid"), Pair.of("TABLE_TYPE", "TABLE") ), - ROW( + row( Pair.of("TABLE_CAT", "druid"), Pair.of("TABLE_NAME", CalciteTests.DATASOURCE2), Pair.of("TABLE_SCHEM", "druid"), Pair.of("TABLE_TYPE", "TABLE") ), - ROW( + row( Pair.of("TABLE_CAT", "druid"), Pair.of("TABLE_NAME", CalciteTests.DATASOURCE3), Pair.of("TABLE_SCHEM", "druid"), @@ -418,25 +418,25 @@ public class DruidAvaticaHandlerTest extends CalciteTestBase final DatabaseMetaData metaData = superuserClient.getMetaData(); Assert.assertEquals( ImmutableList.of( - ROW( + row( Pair.of("TABLE_CAT", "druid"), Pair.of("TABLE_NAME", CalciteTests.DATASOURCE1), Pair.of("TABLE_SCHEM", "druid"), Pair.of("TABLE_TYPE", "TABLE") ), - ROW( + row( Pair.of("TABLE_CAT", "druid"), Pair.of("TABLE_NAME", CalciteTests.DATASOURCE2), Pair.of("TABLE_SCHEM", "druid"), Pair.of("TABLE_TYPE", "TABLE") ), - ROW( + row( Pair.of("TABLE_CAT", "druid"), Pair.of("TABLE_NAME", CalciteTests.FORBIDDEN_DATASOURCE), Pair.of("TABLE_SCHEM", "druid"), Pair.of("TABLE_TYPE", "TABLE") ), - ROW( + row( Pair.of("TABLE_CAT", "druid"), Pair.of("TABLE_NAME", CalciteTests.DATASOURCE3), Pair.of("TABLE_SCHEM", "druid"), @@ -456,7 +456,7 @@ public class DruidAvaticaHandlerTest extends CalciteTestBase final DatabaseMetaData metaData = client.getMetaData(); Assert.assertEquals( ImmutableList.of( - ROW( + row( Pair.of("TABLE_SCHEM", "druid"), Pair.of("TABLE_NAME", "foo"), Pair.of("COLUMN_NAME", "__time"), @@ -464,7 +464,7 @@ public class DruidAvaticaHandlerTest extends CalciteTestBase Pair.of("TYPE_NAME", "TIMESTAMP"), Pair.of("IS_NULLABLE", "NO") ), - ROW( + row( Pair.of("TABLE_SCHEM", "druid"), Pair.of("TABLE_NAME", "foo"), Pair.of("COLUMN_NAME", "cnt"), @@ -472,7 +472,7 @@ public class DruidAvaticaHandlerTest extends CalciteTestBase Pair.of("TYPE_NAME", "BIGINT"), Pair.of("IS_NULLABLE", "NO") ), - ROW( + row( Pair.of("TABLE_SCHEM", "druid"), Pair.of("TABLE_NAME", "foo"), Pair.of("COLUMN_NAME", "dim1"), @@ -480,7 +480,7 @@ public class DruidAvaticaHandlerTest extends CalciteTestBase Pair.of("TYPE_NAME", "VARCHAR"), Pair.of("IS_NULLABLE", "YES") ), - ROW( + row( Pair.of("TABLE_SCHEM", "druid"), Pair.of("TABLE_NAME", "foo"), Pair.of("COLUMN_NAME", "dim2"), @@ -488,7 +488,7 @@ public class DruidAvaticaHandlerTest extends CalciteTestBase Pair.of("TYPE_NAME", "VARCHAR"), Pair.of("IS_NULLABLE", "YES") ), - ROW( + row( Pair.of("TABLE_SCHEM", "druid"), Pair.of("TABLE_NAME", "foo"), Pair.of("COLUMN_NAME", "dim3"), @@ -496,7 +496,7 @@ public class DruidAvaticaHandlerTest extends CalciteTestBase Pair.of("TYPE_NAME", "VARCHAR"), Pair.of("IS_NULLABLE", "YES") ), - ROW( + row( Pair.of("TABLE_SCHEM", "druid"), Pair.of("TABLE_NAME", "foo"), Pair.of("COLUMN_NAME", "m1"), @@ -504,7 +504,7 @@ public class DruidAvaticaHandlerTest extends CalciteTestBase Pair.of("TYPE_NAME", "FLOAT"), Pair.of("IS_NULLABLE", "NO") ), - ROW( + row( Pair.of("TABLE_SCHEM", "druid"), Pair.of("TABLE_NAME", "foo"), Pair.of("COLUMN_NAME", "m2"), @@ -512,7 +512,7 @@ public class DruidAvaticaHandlerTest extends CalciteTestBase Pair.of("TYPE_NAME", "DOUBLE"), Pair.of("IS_NULLABLE", "NO") ), - ROW( + row( Pair.of("TABLE_SCHEM", "druid"), Pair.of("TABLE_NAME", "foo"), Pair.of("COLUMN_NAME", "unique_dim1"), @@ -547,7 +547,7 @@ public class DruidAvaticaHandlerTest extends CalciteTestBase final DatabaseMetaData metaData = superuserClient.getMetaData(); 
Assert.assertEquals( ImmutableList.of( - ROW( + row( Pair.of("TABLE_SCHEM", "druid"), Pair.of("TABLE_NAME", CalciteTests.FORBIDDEN_DATASOURCE), Pair.of("COLUMN_NAME", "__time"), @@ -555,7 +555,7 @@ public class DruidAvaticaHandlerTest extends CalciteTestBase Pair.of("TYPE_NAME", "TIMESTAMP"), Pair.of("IS_NULLABLE", "NO") ), - ROW( + row( Pair.of("TABLE_SCHEM", "druid"), Pair.of("TABLE_NAME", CalciteTests.FORBIDDEN_DATASOURCE), Pair.of("COLUMN_NAME", "cnt"), @@ -563,7 +563,7 @@ public class DruidAvaticaHandlerTest extends CalciteTestBase Pair.of("TYPE_NAME", "BIGINT"), Pair.of("IS_NULLABLE", "NO") ), - ROW( + row( Pair.of("TABLE_SCHEM", "druid"), Pair.of("TABLE_NAME", CalciteTests.FORBIDDEN_DATASOURCE), Pair.of("COLUMN_NAME", "dim1"), @@ -571,7 +571,7 @@ public class DruidAvaticaHandlerTest extends CalciteTestBase Pair.of("TYPE_NAME", "VARCHAR"), Pair.of("IS_NULLABLE", "YES") ), - ROW( + row( Pair.of("TABLE_SCHEM", "druid"), Pair.of("TABLE_NAME", CalciteTests.FORBIDDEN_DATASOURCE), Pair.of("COLUMN_NAME", "dim2"), @@ -579,7 +579,7 @@ public class DruidAvaticaHandlerTest extends CalciteTestBase Pair.of("TYPE_NAME", "VARCHAR"), Pair.of("IS_NULLABLE", "YES") ), - ROW( + row( Pair.of("TABLE_SCHEM", "druid"), Pair.of("TABLE_NAME", CalciteTests.FORBIDDEN_DATASOURCE), Pair.of("COLUMN_NAME", "m1"), @@ -587,7 +587,7 @@ public class DruidAvaticaHandlerTest extends CalciteTestBase Pair.of("TYPE_NAME", "FLOAT"), Pair.of("IS_NULLABLE", "NO") ), - ROW( + row( Pair.of("TABLE_SCHEM", "druid"), Pair.of("TABLE_NAME", CalciteTests.FORBIDDEN_DATASOURCE), Pair.of("COLUMN_NAME", "m2"), @@ -595,7 +595,7 @@ public class DruidAvaticaHandlerTest extends CalciteTestBase Pair.of("TYPE_NAME", "DOUBLE"), Pair.of("IS_NULLABLE", "NO") ), - ROW( + row( Pair.of("TABLE_SCHEM", "druid"), Pair.of("TABLE_NAME", CalciteTests.FORBIDDEN_DATASOURCE), Pair.of("COLUMN_NAME", "unique_dim1"), @@ -928,7 +928,7 @@ public class DruidAvaticaHandlerTest extends CalciteTestBase } } - private static Map<String, Object> ROW(final Pair<String, ?>... entries) + private static Map<String, Object> row(final Pair<String, ?>... entries) { final Map<String, Object> m = new HashMap<>(); for (Pair<String, ?> entry : entries) { diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/BaseCalciteQueryTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/BaseCalciteQueryTest.java index a40b424e699..b0834f2814c 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/BaseCalciteQueryTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/BaseCalciteQueryTest.java @@ -243,70 +243,70 @@ public class BaseCalciteQueryTest extends CalciteTestBase } // Generate timestamps for expected results - public static long T(final String timeString) + public static long t(final String timeString) { return Calcites.jodaToCalciteTimestamp(DateTimes.of(timeString), DateTimeZone.UTC); } // Generate timestamps for expected results - public static long T(final String timeString, final String timeZoneString) + public static long t(final String timeString, final String timeZoneString) { final DateTimeZone timeZone = DateTimes.inferTzFromString(timeZoneString); return Calcites.jodaToCalciteTimestamp(new DateTime(timeString, timeZone), timeZone); } // Generate day numbers for expected results - public static int D(final String dayString) + public static int d(final String dayString) { - return (int) (Intervals.utc(T("1970"), T(dayString)).toDurationMillis() / (86400L * 1000L)); + return (int) (Intervals.utc(t("1970"), t(dayString)).toDurationMillis() / (86400L * 1000L)); }
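The renamed t() and d() helpers just shown generate the literal timestamps and day numbers used in the expected-result rows. A quick worked example (the values follow from the epoch arithmetic in the definitions themselves):

    long ts = t("2000-01-01");  // with the UTC overload this is the epoch millis of
                                // 2000-01-01T00:00:00Z, i.e. 946684800000L
    int day = d("2000-01-01");  // days since 1970-01-01: 30 * 365 + 7 leap days = 10957
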
- public static QuerySegmentSpec QSS(final Interval... intervals) + public static QuerySegmentSpec querySegmentSpec(final Interval... intervals) { return new MultipleIntervalSegmentSpec(Arrays.asList(intervals)); } - public static AndDimFilter AND(DimFilter... filters) + public static AndDimFilter and(DimFilter... filters) { return new AndDimFilter(Arrays.asList(filters)); } - public static OrDimFilter OR(DimFilter... filters) + public static OrDimFilter or(DimFilter... filters) { return new OrDimFilter(Arrays.asList(filters)); } - public static NotDimFilter NOT(DimFilter filter) + public static NotDimFilter not(DimFilter filter) { return new NotDimFilter(filter); } - public static InDimFilter IN(String dimension, List<String> values, ExtractionFn extractionFn) + public static InDimFilter in(String dimension, List<String> values, ExtractionFn extractionFn) { return new InDimFilter(dimension, values, extractionFn); } - public static SelectorDimFilter SELECTOR(final String fieldName, final String value, final ExtractionFn extractionFn) + public static SelectorDimFilter selector(final String fieldName, final String value, final ExtractionFn extractionFn) { return new SelectorDimFilter(fieldName, value, extractionFn); } - public static ExpressionDimFilter EXPRESSION_FILTER(final String expression) + public static ExpressionDimFilter expressionFilter(final String expression) { return new ExpressionDimFilter(expression, CalciteTests.createExprMacroTable()); } - public static DimFilter NUMERIC_SELECTOR( + public static DimFilter numericSelector( final String fieldName, final String value, final ExtractionFn extractionFn ) { // We use Bound filters for numeric equality to achieve "10.0" = "10" - return BOUND(fieldName, value, value, false, false, extractionFn, StringComparators.NUMERIC); + return bound(fieldName, value, value, false, false, extractionFn, StringComparators.NUMERIC); } - public static BoundDimFilter BOUND( + public static BoundDimFilter bound( final String fieldName, final String lower, final String upper, @@ -319,7 +319,7 @@ public class BaseCalciteQueryTest extends CalciteTestBase return new BoundDimFilter(fieldName, lower, upper, lowerStrict, upperStrict, null, extractionFn, comparator); } - public static BoundDimFilter TIME_BOUND(final Object intervalObj) + public static BoundDimFilter timeBound(final Object intervalObj) { final Interval interval = new Interval(intervalObj, ISOChronology.getInstanceUTC()); return new BoundDimFilter( @@ -329,32 +329,32 @@ public class BaseCalciteQueryTest extends CalciteTestBase false, true, null, - null, + null, StringComparators.NUMERIC ); } - public static CascadeExtractionFn CASCADE(final ExtractionFn... fns) + public static CascadeExtractionFn cascade(final ExtractionFn... fns) { return new CascadeExtractionFn(fns); } - public static List<DimensionSpec> DIMS(final DimensionSpec... dimensionSpecs) + public static List<DimensionSpec> dimensionSpec(final DimensionSpec... dimensionSpecs) { return Arrays.asList(dimensionSpecs); }
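The comment inside numericSelector above explains why that helper expands to a bound filter rather than a selector: compared as strings, "10.0" and "10" would not match. Written out by hand, using the same constructor the bound() helper calls:

    // numericSelector("m1", "10", null) is the inclusive numeric range [10, 10],
    // so a row whose value reads "10.0" still matches.
    new BoundDimFilter("m1", "10", "10", false, false, null, null, StringComparators.NUMERIC);
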
- public static List<AggregatorFactory> AGGS(final AggregatorFactory... aggregators) + public static List<AggregatorFactory> aggregators(final AggregatorFactory... aggregators) { return Arrays.asList(aggregators); } - public static DimFilterHavingSpec HAVING(final DimFilter filter) + public static DimFilterHavingSpec having(final DimFilter filter) { return new DimFilterHavingSpec(filter, true); } - public static ExpressionVirtualColumn EXPRESSION_VIRTUAL_COLUMN( + public static ExpressionVirtualColumn expressionVirtualColumn( final String name, final String expression, final ValueType outputType @@ -363,7 +363,7 @@ public class BaseCalciteQueryTest extends CalciteTestBase return new ExpressionVirtualColumn(name, expression, outputType, CalciteTests.createExprMacroTable()); } - public static ExpressionPostAggregator EXPRESSION_POST_AGG(final String name, final String expression) + public static ExpressionPostAggregator expressionPostAgg(final String name, final String expression) { return new ExpressionPostAggregator(name, expression, null, CalciteTests.createExprMacroTable()); } diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java index d131cd361f3..7e12553cb56 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java @@ -108,8 +108,8 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( newScanQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) - .virtualColumns(EXPRESSION_VIRTUAL_COLUMN("v0", "2", ValueType.LONG)) + .intervals(querySegmentSpec(Filtration.eternity())) + .virtualColumns(expressionVirtualColumn("v0", "2", ValueType.LONG)) .columns("dim1", "v0") .resultFormat(ScanQuery.ResultFormat.RESULT_FORMAT_COMPACTED_LIST) .limit(1) @@ -133,15 +133,15 @@ public class CalciteQueryTest extends BaseCalciteQueryTest CalciteTests.REGULAR_USER_AUTH_RESULT, ImmutableList.of(Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) - .filters(SELECTOR("dim2", "0", null)) + .intervals(querySegmentSpec(Filtration.eternity())) + .filters(selector("dim2", "0", null)) .granularity(Granularities.ALL) - .aggregators(AGGS( + .aggregators(aggregators( new CountAggregatorFactory("a0"), new DoubleSumAggregatorFactory("a1", "m2") )) .postAggregators( - EXPRESSION_POST_AGG("p0", "(exp(\"a0\") + 10)") + expressionPostAgg("p0", "(exp(\"a0\") + 10)") ) .context(QUERY_CONTEXT_DONT_SKIP_EMPTY_BUCKETS) .build()), @@ -157,14 +157,15 @@ public class CalciteQueryTest extends BaseCalciteQueryTest CalciteTests.REGULAR_USER_AUTH_RESULT, ImmutableList.of(Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Intervals.of("2999-01-01T00:00:00.000Z/146140482-04-24T15:36:27.903Z"))) + .intervals(querySegmentSpec(Intervals.of( + "2999-01-01T00:00:00.000Z/146140482-04-24T15:36:27.903Z"))) .granularity(Granularities.ALL) - .aggregators(AGGS( + .aggregators(aggregators( new CountAggregatorFactory("a0"), new DoubleSumAggregatorFactory("a1", "m2") )) .postAggregators( - EXPRESSION_POST_AGG("p0", "(exp(\"a0\") + 10)") + expressionPostAgg("p0", "(exp(\"a0\") + 10)") ) .context(QUERY_CONTEXT_DONT_SKIP_EMPTY_BUCKETS) .build()), @@ -177,10 +178,10 @@ public class CalciteQueryTest extends BaseCalciteQueryTest "SELECT COUNT(*) FROM foo WHERE dim1 = 'nonexistent' GROUP BY FLOOR(__time TO DAY)", ImmutableList.of(Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) -
.filters(SELECTOR("dim1", "nonexistent", null)) + .intervals(querySegmentSpec(Filtration.eternity())) + .filters(selector("dim1", "nonexistent", null)) .granularity(Granularities.DAY) - .aggregators(AGGS( + .aggregators(aggregators( new CountAggregatorFactory("a0") )) .context(TIMESERIES_CONTEXT_DEFAULT) @@ -212,21 +213,21 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) - .aggregators(AGGS(new CountAggregatorFactory("a0"))) + .aggregators(aggregators(new CountAggregatorFactory("a0"))) .postAggregators( - EXPRESSION_POST_AGG("p0", "'foo'"), - EXPRESSION_POST_AGG("p1", "'xfoo'"), - EXPRESSION_POST_AGG("p2", "'foo'"), - EXPRESSION_POST_AGG("p3", "' foo'"), - EXPRESSION_POST_AGG("p4", "'foo'"), - EXPRESSION_POST_AGG("p5", "'foo'"), - EXPRESSION_POST_AGG("p6", "'foo'"), - EXPRESSION_POST_AGG("p7", "'foo '"), - EXPRESSION_POST_AGG("p8", "'foox'"), - EXPRESSION_POST_AGG("p9", "' foo'"), - EXPRESSION_POST_AGG("p10", "'xfoo'") + expressionPostAgg("p0", "'foo'"), + expressionPostAgg("p1", "'xfoo'"), + expressionPostAgg("p2", "'foo'"), + expressionPostAgg("p3", "' foo'"), + expressionPostAgg("p4", "'foo'"), + expressionPostAgg("p5", "'foo'"), + expressionPostAgg("p6", "'foo'"), + expressionPostAgg("p7", "'foo '"), + expressionPostAgg("p8", "'foox'"), + expressionPostAgg("p9", "' foo'"), + expressionPostAgg("p10", "'xfoo'") ) .context(TIMESERIES_CONTEXT_DEFAULT) .build() @@ -308,7 +309,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest .add(new Object[]{"sys", "server_segments", "SYSTEM_TABLE"}) .add(new Object[]{"sys", "servers", "SYSTEM_TABLE"}) .add(new Object[]{"sys", "tasks", "SYSTEM_TABLE"}) - .build() + .build() ); } @@ -429,21 +430,21 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( newScanQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .columns("__time", "cnt", "dim1", "dim2", "dim3", "m1", "m2", "unique_dim1") .resultFormat(ScanQuery.ResultFormat.RESULT_FORMAT_COMPACTED_LIST) .context(QUERY_CONTEXT_DEFAULT) .build() ), ImmutableList.of( - new Object[]{T("2000-01-01"), 1L, "", "a", "[\"a\",\"b\"]", 1f, 1.0, hyperLogLogCollectorClassName}, + new Object[]{t("2000-01-01"), 1L, "", "a", "[\"a\",\"b\"]", 1f, 1.0, hyperLogLogCollectorClassName}, new Object[]{ - T("2000-01-02"), 1L, "10.1", NULL_VALUE, "[\"b\",\"c\"]", 2f, 2.0, hyperLogLogCollectorClassName + t("2000-01-02"), 1L, "10.1", NULL_VALUE, "[\"b\",\"c\"]", 2f, 2.0, hyperLogLogCollectorClassName }, - new Object[]{T("2000-01-03"), 1L, "2", "", "d", 3f, 3.0, hyperLogLogCollectorClassName}, - new Object[]{T("2001-01-01"), 1L, "1", "a", "", 4f, 4.0, hyperLogLogCollectorClassName}, - new Object[]{T("2001-01-02"), 1L, "def", "abc", NULL_VALUE, 5f, 5.0, hyperLogLogCollectorClassName}, - new Object[]{T("2001-01-03"), 1L, "abc", NULL_VALUE, NULL_VALUE, 6f, 6.0, hyperLogLogCollectorClassName} + new Object[]{t("2000-01-03"), 1L, "2", "", "d", 3f, 3.0, hyperLogLogCollectorClassName}, + new Object[]{t("2001-01-01"), 1L, "1", "a", "", 4f, 4.0, hyperLogLogCollectorClassName}, + new Object[]{t("2001-01-02"), 1L, "def", "abc", NULL_VALUE, 5f, 5.0, hyperLogLogCollectorClassName}, + new Object[]{t("2001-01-03"), 1L, "abc", NULL_VALUE, NULL_VALUE, 6f, 6.0, hyperLogLogCollectorClassName} ) ); } @@
-463,7 +464,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( newScanQueryBuilder() .dataSource(CalciteTests.FORBIDDEN_DATASOURCE) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .columns("__time", "cnt", "dim1", "dim2", "m1", "m2", "unique_dim1") .resultFormat(ScanQuery.ResultFormat.RESULT_FORMAT_COMPACTED_LIST) .context(QUERY_CONTEXT_DEFAULT) @@ -471,7 +472,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ), ImmutableList.of( new Object[]{ - T("2000-01-01"), + t("2000-01-01"), 1L, "forbidden", "abcd", @@ -491,9 +492,9 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) - .aggregators(AGGS(new CountAggregatorFactory("a0"))) + .aggregators(aggregators(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build() ), @@ -528,7 +529,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( newScanQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .columns("__time", "cnt", "dim1", "dim2", "dim3", "m1", "m2", "unique_dim1") .limit(2) .resultFormat(ScanQuery.ResultFormat.RESULT_FORMAT_COMPACTED_LIST) @@ -536,8 +537,8 @@ public class CalciteQueryTest extends BaseCalciteQueryTest .build() ), ImmutableList.of( - new Object[]{T("2000-01-01"), 1L, "", "a", "[\"a\",\"b\"]", 1.0f, 1.0, HLLC_STRING}, - new Object[]{T("2000-01-02"), 1L, "10.1", NULL_VALUE, "[\"b\",\"c\"]", 2.0f, 2.0, HLLC_STRING} + new Object[]{t("2000-01-01"), 1L, "", "a", "[\"a\",\"b\"]", 1.0f, 1.0, HLLC_STRING}, + new Object[]{t("2000-01-02"), 1L, "10.1", NULL_VALUE, "[\"b\",\"c\"]", 2.0f, 2.0, HLLC_STRING} ) ); } @@ -550,9 +551,9 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( newScanQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .virtualColumns( - EXPRESSION_VIRTUAL_COLUMN("v0", "substring(\"dim2\", 0, 1)", ValueType.STRING) + expressionVirtualColumn("v0", "substring(\"dim2\", 0, 1)", ValueType.STRING) ) .columns("v0") .limit(2) @@ -578,7 +579,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newSelectQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) +
.intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) .dimensions(ImmutableList.of("dummy")) .metrics(ImmutableList.of("__time", "cnt", "dim1", "dim2", "dim3", "m1", "m2", "unique_dim1")) @@ -615,7 +616,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest .build(), Druids.newSelectQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) .dimensions(ImmutableList.of("dummy")) .metrics(ImmutableList.of("__time", "cnt", "dim1", "dim2", "dim3", "m1", "m2", "unique_dim1")) @@ -631,12 +632,12 @@ public class CalciteQueryTest extends BaseCalciteQueryTest .build() ), ImmutableList.of( - new Object[]{T("2000-01-01"), 1L, "", "a", "[\"a\",\"b\"]", 1f, 1.0, HLLC_STRING}, - new Object[]{T("2000-01-02"), 1L, "10.1", NULL_VALUE, "[\"b\",\"c\"]", 2f, 2.0, HLLC_STRING}, - new Object[]{T("2000-01-03"), 1L, "2", "", "d", 3f, 3.0, HLLC_STRING}, - new Object[]{T("2001-01-01"), 1L, "1", "a", "", 4f, 4.0, HLLC_STRING}, - new Object[]{T("2001-01-02"), 1L, "def", "abc", NULL_VALUE, 5f, 5.0, HLLC_STRING}, - new Object[]{T("2001-01-03"), 1L, "abc", NULL_VALUE, NULL_VALUE, 6f, 6.0, HLLC_STRING} + new Object[]{t("2000-01-01"), 1L, "", "a", "[\"a\",\"b\"]", 1f, 1.0, HLLC_STRING}, + new Object[]{t("2000-01-02"), 1L, "10.1", NULL_VALUE, "[\"b\",\"c\"]", 2f, 2.0, HLLC_STRING}, + new Object[]{t("2000-01-03"), 1L, "2", "", "d", 3f, 3.0, HLLC_STRING}, + new Object[]{t("2001-01-01"), 1L, "1", "a", "", 4f, 4.0, HLLC_STRING}, + new Object[]{t("2001-01-02"), 1L, "def", "abc", NULL_VALUE, 5f, 5.0, HLLC_STRING}, + new Object[]{t("2001-01-03"), 1L, "abc", NULL_VALUE, NULL_VALUE, 6f, 6.0, HLLC_STRING} ) ); } @@ -649,7 +650,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( newScanQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .columns("dim2") .limit(2) .resultFormat(ScanQuery.ResultFormat.RESULT_FORMAT_COMPACTED_LIST) @@ -671,8 +672,8 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newSelectQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) - .dimensionSpecs(DIMS(new DefaultDimensionSpec("dim1", "d1"))) + .intervals(querySegmentSpec(Filtration.eternity())) + .dimensionSpecs(dimensionSpec(new DefaultDimensionSpec("dim1", "d1"))) .granularity(Granularities.ALL) .descending(true) .dimensions(ImmutableList.of("dummy")) @@ -698,8 +699,8 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( new GroupByQuery.Builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) - .setDimensions(DIMS(new DefaultDimensionSpec("dim1", "d0"))) + .setInterval(querySegmentSpec(Filtration.eternity())) + .setDimensions(dimensionSpec(new DefaultDimensionSpec("dim1", "d0"))) .setGranularity(Granularities.ALL) .setLimitSpec( new DefaultLimitSpec( @@ -741,15 +742,15 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( newScanQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .columns("dim1") - .filters(NOT(SELECTOR("dim1", "", null))) + .filters(not(selector("dim1", "", null))) .resultFormat(ScanQuery.ResultFormat.RESULT_FORMAT_COMPACTED_LIST) .context(QUERY_CONTEXT_DEFAULT) .build(), newScanQueryBuilder() 
.dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .columns("dim1", "dim2") .resultFormat(ScanQuery.ResultFormat.RESULT_FORMAT_COMPACTED_LIST) .context(QUERY_CONTEXT_DEFAULT) @@ -796,10 +797,10 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) - .setDimensions(DIMS(new DefaultDimensionSpec("cnt", "d0", ValueType.LONG))) - .setAggregatorSpecs(AGGS(new CountAggregatorFactory("a0"))) + .setDimensions(dimensionSpec(new DefaultDimensionSpec("cnt", "d0", ValueType.LONG))) + .setAggregatorSpecs(aggregators(new CountAggregatorFactory("a0"))) .setContext(QUERY_CONTEXT_DEFAULT) .build() ), @@ -817,10 +818,10 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) - .setDimensions(DIMS(new DefaultDimensionSpec("cnt", "d0", ValueType.LONG))) - .setAggregatorSpecs(AGGS(new CountAggregatorFactory("a0"))) + .setDimensions(dimensionSpec(new DefaultDimensionSpec("cnt", "d0", ValueType.LONG))) + .setAggregatorSpecs(aggregators(new CountAggregatorFactory("a0"))) .setContext(QUERY_CONTEXT_DEFAULT) .build() ), @@ -839,10 +840,10 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) - .setDimensions(DIMS(new DefaultDimensionSpec("cnt", "d0", ValueType.LONG))) - .setAggregatorSpecs(AGGS(new CountAggregatorFactory("a0"))) + .setDimensions(dimensionSpec(new DefaultDimensionSpec("cnt", "d0", ValueType.LONG))) + .setAggregatorSpecs(aggregators(new CountAggregatorFactory("a0"))) .setLimitSpec( new DefaultLimitSpec( ImmutableList.of( @@ -876,15 +877,15 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.MONTH) - .aggregators(AGGS(new CountAggregatorFactory("a0"))) + .aggregators(aggregators(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build() ), ImmutableList.of( - new Object[]{T("2000-01-01"), 3L}, - new Object[]{T("2001-01-01"), 3L} + new Object[]{t("2000-01-01"), 3L}, + new Object[]{t("2001-01-01"), 3L} ) ); } @@ -897,10 +898,10 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) - .setDimensions(DIMS(new DefaultDimensionSpec("cnt", "d0", ValueType.LONG))) - .setAggregatorSpecs(AGGS(new CountAggregatorFactory("a0"))) + .setDimensions(dimensionSpec(new DefaultDimensionSpec("cnt", "d0", ValueType.LONG))) + .setAggregatorSpecs(aggregators(new CountAggregatorFactory("a0"))) .setLimitSpec( new DefaultLimitSpec( ImmutableList.of( @@ -930,10 +931,10 @@ public class CalciteQueryTest extends BaseCalciteQueryTest 
ImmutableList.of( GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) - .setDimensions(DIMS(new DefaultDimensionSpec("m1", "d0", ValueType.FLOAT))) - .setAggregatorSpecs(AGGS(new CountAggregatorFactory("a0"))) + .setDimensions(dimensionSpec(new DefaultDimensionSpec("m1", "d0", ValueType.FLOAT))) + .setAggregatorSpecs(aggregators(new CountAggregatorFactory("a0"))) .setContext(QUERY_CONTEXT_DEFAULT) .build() ), @@ -956,10 +957,10 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) - .setDimensions(DIMS(new DefaultDimensionSpec("m2", "d0", ValueType.DOUBLE))) - .setAggregatorSpecs(AGGS(new CountAggregatorFactory("a0"))) + .setDimensions(dimensionSpec(new DefaultDimensionSpec("m2", "d0", ValueType.DOUBLE))) + .setAggregatorSpecs(aggregators(new CountAggregatorFactory("a0"))) .setContext(QUERY_CONTEXT_DEFAULT) .build() ), @@ -982,10 +983,10 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) - .aggregators(AGGS(new CountAggregatorFactory("a0"))) - .filters(SELECTOR("m1", "1.0", null)) + .aggregators(aggregators(new CountAggregatorFactory("a0"))) + .filters(selector("m1", "1.0", null)) .context(TIMESERIES_CONTEXT_DEFAULT) .build() ), @@ -1003,10 +1004,10 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) - .aggregators(AGGS(new CountAggregatorFactory("a0"))) - .filters(SELECTOR("m2", "1.0", null)) + .aggregators(aggregators(new CountAggregatorFactory("a0"))) + .filters(selector("m2", "1.0", null)) .context(TIMESERIES_CONTEXT_DEFAULT) .build() ), @@ -1024,10 +1025,10 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) - .setAggregatorSpecs(AGGS(new DoubleSumAggregatorFactory("a0", "m1"))) - .setHavingSpec(HAVING(NUMERIC_SELECTOR("a0", "21", null))) + .setAggregatorSpecs(aggregators(new DoubleSumAggregatorFactory("a0", "m1"))) + .setHavingSpec(having(numericSelector("a0", "21", null))) .setContext(QUERY_CONTEXT_DEFAULT) .build() ), @@ -1045,12 +1046,12 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) - .setDimensions(DIMS(new DefaultDimensionSpec("dim1", "d0"))) - .setAggregatorSpecs(AGGS(new DoubleSumAggregatorFactory("a0", "m1"))) + .setDimensions(dimensionSpec(new DefaultDimensionSpec("dim1", "d0"))) + .setAggregatorSpecs(aggregators(new DoubleSumAggregatorFactory("a0", "m1"))) .setHavingSpec( - HAVING( + having( new BoundDimFilter(
"a0", "1", @@ -1084,11 +1085,11 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) - .setDimensions(DIMS(new DefaultDimensionSpec("dim2", "d0"))) + .setDimensions(dimensionSpec(new DefaultDimensionSpec("dim2", "d0"))) .setAggregatorSpecs( - AGGS( + aggregators( new CardinalityAggregatorFactory( "a0", null, @@ -1101,8 +1102,8 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ) ) .setHavingSpec( - HAVING( - BOUND( + having( + bound( "a0", "1", null, @@ -1141,10 +1142,10 @@ public class CalciteQueryTest extends BaseCalciteQueryTest new QueryDataSource( GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) .setDimensions( - DIMS( + dimensionSpec( new DefaultDimensionSpec("dim2", "d0", ValueType.STRING), new DefaultDimensionSpec("m1", "d1", ValueType.FLOAT) ) @@ -1153,13 +1154,13 @@ public class CalciteQueryTest extends BaseCalciteQueryTest .build() ) ) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) - .setDimensions(DIMS(new DefaultDimensionSpec("d0", "_d0", ValueType.STRING))) - .setAggregatorSpecs(AGGS(new CountAggregatorFactory("a0"))) + .setDimensions(dimensionSpec(new DefaultDimensionSpec("d0", "_d0", ValueType.STRING))) + .setAggregatorSpecs(aggregators(new CountAggregatorFactory("a0"))) .setHavingSpec( - HAVING( - BOUND( + having( + bound( "a0", "1", null, @@ -1190,17 +1191,17 @@ public class CalciteQueryTest extends BaseCalciteQueryTest { testQuery( PLANNER_CONFIG_FALLBACK, - "SELECT dim1, CAST(SUM(m1) AS FLOAT) AS m1_sum FROM druid.foo GROUP BY dim1 HAVING CAST(SUM(m1) AS FLOAT) > 1", + "SELECT dim1, CASt(SUM(m1) AS FLOAT) AS m1_sum FROM druid.foo GROUP BY dim1 HAVING CAST(SUM(m1) AS FLOAT) > 1", CalciteTests.REGULAR_USER_AUTH_RESULT, ImmutableList.of( GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) - .setDimensions(DIMS(new DefaultDimensionSpec("dim1", "d0"))) - .setAggregatorSpecs(AGGS(new DoubleSumAggregatorFactory("a0", "m1"))) + .setDimensions(dimensionSpec(new DefaultDimensionSpec("dim1", "d0"))) + .setAggregatorSpecs(aggregators(new DoubleSumAggregatorFactory("a0", "m1"))) .setHavingSpec( - HAVING( + having( new BoundDimFilter( "a0", "1", @@ -1234,14 +1235,14 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) - .setDimFilter(EXPRESSION_FILTER("((\"m1\" - 1) == \"dim1\")")) - .setDimensions(DIMS( + .setDimFilter(expressionFilter("((\"m1\" - 1) == \"dim1\")")) + .setDimensions(dimensionSpec( new DefaultDimensionSpec("dim1", "d0"), new DefaultDimensionSpec("m1", "d1", ValueType.FLOAT) )) - .setAggregatorSpecs(AGGS(new CountAggregatorFactory("a0"))) + .setAggregatorSpecs(aggregators(new CountAggregatorFactory("a0"))) .setContext(QUERY_CONTEXT_DEFAULT) .build() ), @@ -1271,20 +1272,20 @@ public class CalciteQueryTest extends BaseCalciteQueryTest 
ImmutableList.of( GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) - .setDimensions(DIMS(new DefaultDimensionSpec("dim1", "d0"))) - .setAggregatorSpecs(AGGS( + .setDimensions(dimensionSpec(new DefaultDimensionSpec("dim1", "d0"))) + .setAggregatorSpecs(aggregators( new FilteredAggregatorFactory( new CountAggregatorFactory("a0"), - NOT(SELECTOR("dim2", "a", null)) + not(selector("dim2", "a", null)) ), new CountAggregatorFactory("a1") )) .setPostAggregatorSpecs(ImmutableList.of( - EXPRESSION_POST_AGG("p0", "(\"a0\" / \"a1\")") + expressionPostAgg("p0", "(\"a0\" / \"a1\")") )) - .setHavingSpec(HAVING(EXPRESSION_FILTER("((\"a0\" / \"a1\") == 1)"))) + .setHavingSpec(having(expressionFilter("((\"a0\" / \"a1\") == 1)"))) .setContext(QUERY_CONTEXT_DEFAULT) .build() ), @@ -1309,11 +1310,11 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) - .setDimensions(DIMS(new DefaultDimensionSpec("dim1", "d0"))) + .setDimensions(dimensionSpec(new DefaultDimensionSpec("dim1", "d0"))) .setPostAggregatorSpecs(ImmutableList.of( - EXPRESSION_POST_AGG("p0", "substring(\"d0\", 1, -1)") + expressionPostAgg("p0", "substring(\"d0\", 1, -1)") )) .setContext(QUERY_CONTEXT_DEFAULT) .build() @@ -1342,12 +1343,12 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) - .setDimensions(DIMS(new DefaultDimensionSpec("dim1", "d0"))) + .setDimensions(dimensionSpec(new DefaultDimensionSpec("dim1", "d0"))) .setPostAggregatorSpecs(ImmutableList.of( - EXPRESSION_POST_AGG("p0", "substring(\"d0\", 1, -1)"), - EXPRESSION_POST_AGG("p1", "strlen(\"d0\")") + expressionPostAgg("p0", "substring(\"d0\", 1, -1)"), + expressionPostAgg("p1", "strlen(\"d0\")") )) .setLimitSpec(new DefaultLimitSpec( ImmutableList.of( @@ -1391,11 +1392,11 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( new TopNQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) .dimension(new DefaultDimensionSpec("dim1", "d0")) .postAggregators(ImmutableList.of( - EXPRESSION_POST_AGG("p0", "substring(\"d0\", 1, -1)") + expressionPostAgg("p0", "substring(\"d0\", 1, -1)") )) .metric(new DimensionTopNMetricSpec(null, StringComparators.LEXICOGRAPHIC)) .threshold(10) @@ -1428,12 +1429,12 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( new TopNQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) .dimension(new DefaultDimensionSpec("dim1", "d0")) .postAggregators(ImmutableList.of( - EXPRESSION_POST_AGG("p0", "substring(\"d0\", 1, -1)"), - EXPRESSION_POST_AGG("p1", "strlen(\"d0\")") + expressionPostAgg("p0", "substring(\"d0\", 1, -1)"), + expressionPostAgg("p1", "strlen(\"d0\")") )) .metric(new NumericTopNMetricSpec("p1")) .threshold(10) @@ -1459,23 +1460,23 @@ public class CalciteQueryTest
extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) - .aggregators(AGGS(new CountAggregatorFactory("a0"))) + .aggregators(aggregators(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build(), Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) - .aggregators(AGGS(new LongSumAggregatorFactory("a0", "cnt"))) + .aggregators(aggregators(new LongSumAggregatorFactory("a0", "cnt"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build(), Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) - .aggregators(AGGS(new CountAggregatorFactory("a0"))) + .aggregators(aggregators(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build() ), @@ -1493,16 +1494,16 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) - .aggregators(AGGS(new CountAggregatorFactory("a0"))) + .aggregators(aggregators(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build(), Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) - .aggregators(AGGS(new LongSumAggregatorFactory("a0", "cnt"))) + .aggregators(aggregators(new LongSumAggregatorFactory("a0", "cnt"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build() ), @@ -1526,9 +1527,9 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) - .aggregators(AGGS(new DoubleSumAggregatorFactory("a0", "m1"))) + .aggregators(aggregators(new DoubleSumAggregatorFactory("a0", "m1"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build() ), @@ -1552,10 +1553,10 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) - .aggregators(AGGS(new DoubleSumAggregatorFactory("a0", "m1"))) - .postAggregators(ImmutableList.of(EXPRESSION_POST_AGG("p0", "(\"a0\" / 10)"))) + .aggregators(aggregators(new DoubleSumAggregatorFactory("a0", "m1"))) + .postAggregators(ImmutableList.of(expressionPostAgg("p0", "(\"a0\" / 10)"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build() ), @@ -1580,10 +1581,10 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) - .setAggregatorSpecs(AGGS(new DoubleSumAggregatorFactory("a0", "m1"))) -
.setHavingSpec(HAVING(NUMERIC_SELECTOR("a0", "21", null))) + .setAggregatorSpecs(aggregators(new DoubleSumAggregatorFactory("a0", "m1"))) + .setHavingSpec(having(numericSelector("a0", "21", null))) .setContext(QUERY_CONTEXT_DEFAULT) .build() ), @@ -1612,10 +1613,10 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) .setVirtualColumns( - EXPRESSION_VIRTUAL_COLUMN( + expressionVirtualColumn( "d0:v", "case_searched(" + "(CAST(timestamp_extract(\"__time\",'DAY','UTC'), 'DOUBLE') == \"m1\")," @@ -1628,8 +1629,8 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ValueType.STRING ) ) - .setDimensions(DIMS(new DefaultDimensionSpec("d0:v", "d0"))) - .setAggregatorSpecs(AGGS(new CountAggregatorFactory("a0"))) + .setDimensions(dimensionSpec(new DefaultDimensionSpec("d0:v", "d0"))) + .setAggregatorSpecs(aggregators(new CountAggregatorFactory("a0"))) .setContext(QUERY_CONTEXT_DEFAULT) .build() ), @@ -1653,17 +1654,17 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) .setVirtualColumns( - EXPRESSION_VIRTUAL_COLUMN( + expressionVirtualColumn( "d0:v", "case_searched(((\"m1\" > 1) && (\"m1\" < 5) && (\"cnt\" == 1)),'x',null)", ValueType.STRING ) ) - .setDimensions(DIMS(new DefaultDimensionSpec("d0:v", "d0"))) - .setAggregatorSpecs(AGGS(new CountAggregatorFactory("a0"))) + .setDimensions(dimensionSpec(new DefaultDimensionSpec("d0:v", "d0"))) + .setAggregatorSpecs(aggregators(new CountAggregatorFactory("a0"))) .setContext(QUERY_CONTEXT_DEFAULT) .build() ), @@ -1684,10 +1685,10 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) - .filters(EXPRESSION_FILTER("case_searched((\"dim2\" == 'a'),1,isnull(\"dim2\"))")) - .aggregators(AGGS(new CountAggregatorFactory("a0"))) + .filters(expressionFilter("case_searched((\"dim2\" == 'a'),1,isnull(\"dim2\"))")) + .aggregators(aggregators(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build() ), @@ -1711,12 +1712,12 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) - .filters(EXPRESSION_FILTER("case_searched((\"dim2\" == 'a')," - + (NullHandling.replaceWithDefault() ? "1" : "0") - + ",(\"dim2\" == ''))")) - .aggregators(AGGS(new CountAggregatorFactory("a0"))) + .filters(expressionFilter("case_searched((\"dim2\" == 'a')," + + (NullHandling.replaceWithDefault() ?
"1" : "0") + + ",(\"dim2\" == ''))")) + .aggregators(aggregators(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build() ), @@ -1740,12 +1741,12 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) - .filters(EXPRESSION_FILTER("case_searched((\"dim2\" == 'a')," - + (NullHandling.replaceWithDefault() ? "1" : "0") - + ",(\"dim2\" == null))")) - .aggregators(AGGS(new CountAggregatorFactory("a0"))) + .filters(expressionFilter("case_searched((\"dim2\" == 'a')," + + (NullHandling.replaceWithDefault() ? "1" : "0") + + ",(\"dim2\" == null))")) + .aggregators(aggregators(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build() ), @@ -1769,17 +1770,17 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) .setVirtualColumns( - EXPRESSION_VIRTUAL_COLUMN( + expression_Virtual_Column( "d0:v", "case_searched(notnull(\"dim2\"),\"dim2\",\"dim1\")", ValueType.STRING ) ) - .setDimensions(DIMS(new DefaultDimensionSpec("d0:v", "d0", ValueType.STRING))) - .setAggregatorSpecs(AGGS(new CountAggregatorFactory("a0"))) + .setDimensions(dimensionSpec(new DefaultDimensionSpec("d0:v", "d0", ValueType.STRING))) + .setAggregatorSpecs(aggregators(new CountAggregatorFactory("a0"))) .setContext(QUERY_CONTEXT_DEFAULT) .build() ), @@ -1810,10 +1811,10 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) - .filters(SELECTOR("dim2", null, null)) - .aggregators(AGGS(new CountAggregatorFactory("a0"))) + .filters(selector("dim2", null, null)) + .aggregators(aggregators(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build() ), @@ -1871,11 +1872,11 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( newScanQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .filters( - OR( - BOUND("dim1", "d", null, true, false, null, StringComparators.LEXICOGRAPHIC), - SELECTOR("dim2", "a", null) + or( + bound("dim1", "d", null, true, false, null, StringComparators.LEXICOGRAPHIC), + selector("dim2", "a", null) ) ) .columns("__time", "cnt", "dim1", "dim2", "dim3", "m1", "m2", "unique_dim1") @@ -1884,9 +1885,9 @@ public class CalciteQueryTest extends BaseCalciteQueryTest .build() ), ImmutableList.of( - new Object[]{T("2000-01-01"), 1L, "", "a", "[\"a\",\"b\"]", 1.0f, 1.0d, HLLC_STRING}, - new Object[]{T("2001-01-01"), 1L, "1", "a", "", 4.0f, 4.0d, HLLC_STRING}, - new Object[]{T("2001-01-02"), 1L, "def", "abc", NULL_VALUE, 5.0f, 5.0d, HLLC_STRING} + new Object[]{t("2000-01-01"), 1L, "", "a", "[\"a\",\"b\"]", 1.0f, 1.0d, HLLC_STRING}, + new Object[]{t("2001-01-01"), 1L, "1", "a", "", 4.0f, 4.0d, HLLC_STRING}, + new Object[]{t("2001-01-02"), 1L, "def", "abc", NULL_VALUE, 5.0f, 5.0d, HLLC_STRING} ) ); } @@ -1924,10 +1925,10 @@ public class CalciteQueryTest extends BaseCalciteQueryTest 
ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) - .filters(SELECTOR("dim1", "foobar", null)) + .intervals(querySegmentSpec(Filtration.eternity())) + .filters(selector("dim1", "foobar", null)) .granularity(Granularities.ALL) - .aggregators(AGGS( + .aggregators(aggregators( new CountAggregatorFactory("a0"), new LongMaxAggregatorFactory("a1", "cnt") )) @@ -1946,10 +1947,10 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) - .filters(SELECTOR("dim1", "foobar", null)) + .intervals(querySegmentSpec(Filtration.eternity())) + .filters(selector("dim1", "foobar", null)) .granularity(Granularities.ALL) - .aggregators(AGGS( + .aggregators(aggregators( new CountAggregatorFactory("a0"), new LongMaxAggregatorFactory("a1", "cnt") )) @@ -1968,9 +1969,9 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) - .aggregators(AGGS(new CountAggregatorFactory("a0"))) + .aggregators(aggregators(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build() ), @@ -1988,12 +1989,12 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) - .aggregators(AGGS( + .aggregators(aggregators( new FilteredAggregatorFactory( new CountAggregatorFactory("a0"), - NOT(SELECTOR("dim2", null, null)) + not(selector("dim2", null, null)) ) )) .context(TIMESERIES_CONTEXT_DEFAULT) @@ -2017,12 +2018,12 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) - .aggregators(AGGS( + .aggregators(aggregators( new FilteredAggregatorFactory( new CountAggregatorFactory("a0"), - EXPRESSION_FILTER( + expressionFilter( "notnull(case_searched((\"dim2\" == 'abc'),'yes',(\"dim2\" == 'def'),'yes'," + DruidExpression.nullLiteral() + "))" @@ -2046,9 +2047,9 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) - .aggregators(AGGS(new CountAggregatorFactory("a0"))) + .aggregators(aggregators(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build() ), @@ -2067,13 +2068,13 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) - .filters(AND( - SELECTOR("dim2", "a", null), - NOT(SELECTOR("dim1", "z", new SubstringDimExtractionFn(0, 1))) + .intervals(querySegmentSpec(Filtration.eternity())) + .filters(and( + selector("dim2", "a", null), + not(selector("dim1", "z", new SubstringDimExtractionFn(0, 1))) )) .granularity(Granularities.ALL) - .aggregators(AGGS(new 
CountAggregatorFactory("a0"))) + .aggregators(aggregators(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build() ), @@ -2091,13 +2092,13 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) - .filters(AND( - SELECTOR("dim2", "a", null), - NOT(SELECTOR("dim1", "z", new SubstringDimExtractionFn(0, 1))) + .intervals(querySegmentSpec(Filtration.eternity())) + .filters(and( + selector("dim2", "a", null), + not(selector("dim1", "z", new SubstringDimExtractionFn(0, 1))) )) .granularity(Granularities.ALL) - .aggregators(AGGS(new CountAggregatorFactory("a0"))) + .aggregators(aggregators(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build() ), @@ -2142,15 +2143,15 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) .filters( - OR( + or( new LikeDimFilter("dim1", "a%", null, null), new LikeDimFilter("dim2", "%xb%", "x", null) ) ) - .aggregators(AGGS(new CountAggregatorFactory("a0"))) + .aggregators(aggregators(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build() ), @@ -2168,15 +2169,15 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) .filters( - OR( - BOUND("cnt", "3", null, false, false, null, StringComparators.NUMERIC), - SELECTOR("cnt", "1", null) + or( + bound("cnt", "3", null, false, false, null, StringComparators.NUMERIC), + selector("cnt", "1", null) ) ) - .aggregators(AGGS(new CountAggregatorFactory("a0"))) + .aggregators(aggregators(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build() ), @@ -2194,12 +2195,12 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) .filters( - BOUND("cnt", "1.1", "100000001.0", true, true, null, StringComparators.NUMERIC) + bound("cnt", "1.1", "100000001.0", true, true, null, StringComparators.NUMERIC) ) - .aggregators(AGGS(new CountAggregatorFactory("a0"))) + .aggregators(aggregators(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build() ), @@ -2211,12 +2212,12 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) .filters( - SELECTOR("cnt", "1.0", null) + selector("cnt", "1.0", null) ) - .aggregators(AGGS(new CountAggregatorFactory("a0"))) + .aggregators(aggregators(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build() ), @@ -2230,12 +2231,12 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + 
.intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) .filters( - SELECTOR("cnt", "100000001.0", null) + selector("cnt", "100000001.0", null) ) - .aggregators(AGGS(new CountAggregatorFactory("a0"))) + .aggregators(aggregators(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build() ), @@ -2247,12 +2248,12 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) .filters( - IN("cnt", ImmutableList.of("1.0", "100000001.0"), null) + in("cnt", ImmutableList.of("1.0", "100000001.0"), null) ) - .aggregators(AGGS(new CountAggregatorFactory("a0"))) + .aggregators(aggregators(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build() ), @@ -2270,10 +2271,10 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) - .filters(IN("cnt", ImmutableList.of("1", "2"), null)) - .aggregators(AGGS(new CountAggregatorFactory("a0"))) + .filters(in("cnt", ImmutableList.of("1", "2"), null)) + .aggregators(aggregators(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build() ), @@ -2293,15 +2294,15 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) - .setDimensions(DIMS(new DefaultDimensionSpec("dim1", "d0"))) + .setDimensions(dimensionSpec(new DefaultDimensionSpec("dim1", "d0"))) .setDimFilter( - OR( - SELECTOR("dim1", "10", null), - AND( - EXPRESSION_FILTER("(floor(CAST(\"dim1\", 'DOUBLE')) == 10.00)"), - BOUND("dim1", "9", "10.5", true, false, null, StringComparators.NUMERIC) + or( + selector("dim1", "10", null), + and( + expressionFilter("(floor(CAST(\"dim1\", 'DOUBLE')) == 10.00)"), + bound("dim1", "9", "10.5", true, false, null, StringComparators.NUMERIC) ) ) ) @@ -2322,14 +2323,14 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) .aggregators( - AGGS( + aggregators( new CountAggregatorFactory("a0"), new FilteredAggregatorFactory( new CountAggregatorFactory("a1"), - NOT(SELECTOR("dim1", null, null)) + not(selector("dim1", null, null)) ), new LongSumAggregatorFactory("a2:sum", "cnt"), new CountAggregatorFactory("a2:count"), @@ -2338,7 +2339,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest new LongMaxAggregatorFactory("a5", "cnt"), new FilteredAggregatorFactory( new CountAggregatorFactory("a6"), - NOT(SELECTOR("dim2", null, null)) + not(selector("dim2", null, null)) ) ) ) @@ -2351,7 +2352,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest new FieldAccessPostAggregator(null, "a2:count") ) ), - EXPRESSION_POST_AGG("p0", "((\"a3\" + \"a4\") + \"a5\")") + expressionPostAgg("p0", "((\"a3\" + \"a4\") + \"a5\")") ) .context(TIMESERIES_CONTEXT_DEFAULT) .build() @@ -2376,17 +2377,17 @@ public class 
CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( new TopNQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) .dimension(new DefaultDimensionSpec("dim1", "d0")) .metric(new InvertedTopNMetricSpec(new NumericTopNMetricSpec("p0"))) - .aggregators(AGGS( + .aggregators(aggregators( new FloatMinAggregatorFactory("a0", "m1"), new FloatMaxAggregatorFactory("a1", "m1") )) .postAggregators( ImmutableList.of( - EXPRESSION_POST_AGG("p0", "(\"a0\" + \"a1\")") + expressionPostAgg("p0", "(\"a0\" + \"a1\")") ) ) .threshold(3) @@ -2413,14 +2414,14 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) - .setDimensions(DIMS(new DefaultDimensionSpec("dim1", "d0"))) + .setDimensions(dimensionSpec(new DefaultDimensionSpec("dim1", "d0"))) .setAggregatorSpecs( new FloatMinAggregatorFactory("a0", "m1"), new FloatMaxAggregatorFactory("a1", "m1") ) - .setPostAggregatorSpecs(ImmutableList.of(EXPRESSION_POST_AGG("p0", "(\"a0\" + \"a1\")"))) + .setPostAggregatorSpecs(ImmutableList.of(expressionPostAgg("p0", "(\"a0\" + \"a1\")"))) .setLimitSpec( new DefaultLimitSpec( ImmutableList.of( @@ -2457,16 +2458,16 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) - .setDimensions(DIMS(new DefaultDimensionSpec("dim1", "d0"))) + .setDimensions(dimensionSpec(new DefaultDimensionSpec("dim1", "d0"))) .setAggregatorSpecs( new FloatMinAggregatorFactory("a0", "m1"), new FloatMaxAggregatorFactory("a1", "m1") ) .setPostAggregatorSpecs( ImmutableList.of( - EXPRESSION_POST_AGG("p0", "(\"a0\" + \"a1\")") + expressionPostAgg("p0", "(\"a0\" + \"a1\")") ) ) .setLimitSpec( @@ -2513,68 +2514,68 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) - .aggregators(AGGS( + .aggregators(aggregators( new FilteredAggregatorFactory( new LongSumAggregatorFactory("a0", "cnt"), - SELECTOR("dim1", "abc", null) + selector("dim1", "abc", null) ), new FilteredAggregatorFactory( new LongSumAggregatorFactory("a1", "cnt"), - NOT(SELECTOR("dim1", "abc", null)) + not(selector("dim1", "abc", null)) ), new FilteredAggregatorFactory( new LongSumAggregatorFactory("a2", "cnt"), - SELECTOR("dim1", "a", new SubstringDimExtractionFn(0, 1)) + selector("dim1", "a", new SubstringDimExtractionFn(0, 1)) ), new FilteredAggregatorFactory( new CountAggregatorFactory("a3"), - AND( - NOT(SELECTOR("dim2", null, null)), - NOT(SELECTOR("dim1", "1", null)) + and( + not(selector("dim2", null, null)), + not(selector("dim1", "1", null)) ) ), new FilteredAggregatorFactory( new CountAggregatorFactory("a4"), - NOT(SELECTOR("dim1", "1", null)) + not(selector("dim1", "1", null)) ), new FilteredAggregatorFactory( new CountAggregatorFactory("a5"), - NOT(SELECTOR("dim1", "1", null)) + not(selector("dim1", "1", null)) ), new FilteredAggregatorFactory( new LongSumAggregatorFactory("a6", "cnt"), - 
SELECTOR("dim2", "a", null) + selector("dim2", "a", null) ), new FilteredAggregatorFactory( new LongSumAggregatorFactory("a7", "cnt"), - AND( - SELECTOR("dim2", "a", null), - NOT(SELECTOR("dim1", "1", null)) + and( + selector("dim2", "a", null), + not(selector("dim1", "1", null)) ) ), new FilteredAggregatorFactory( new LongSumAggregatorFactory("a8", "cnt"), - NOT(SELECTOR("dim1", "1", null)) + not(selector("dim1", "1", null)) ), new FilteredAggregatorFactory( new LongMaxAggregatorFactory("a9", "cnt"), - NOT(SELECTOR("dim1", "1", null)) + not(selector("dim1", "1", null)) ), new FilteredAggregatorFactory( new CardinalityAggregatorFactory( "a10", null, - DIMS(new DefaultDimensionSpec("m1", "m1", ValueType.FLOAT)), + dimensionSpec(new DefaultDimensionSpec("m1", "m1", ValueType.FLOAT)), false, true ), - NOT(SELECTOR("dim1", "1", null)) + not(selector("dim1", "1", null)) ), new FilteredAggregatorFactory( new LongSumAggregatorFactory("a11", "cnt"), - AND(SELECTOR("dim2", "a", null), SELECTOR("dim1", "b", null)) + and(selector("dim2", "a", null), selector("dim1", "b", null)) ) )) .context(TIMESERIES_CONTEXT_DEFAULT) @@ -2602,17 +2603,17 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) - .setDimensions(DIMS(new DefaultDimensionSpec("cnt", "d0", ValueType.LONG))) - .setAggregatorSpecs(AGGS( + .setDimensions(dimensionSpec(new DefaultDimensionSpec("cnt", "d0", ValueType.LONG))) + .setAggregatorSpecs(aggregators( new FilteredAggregatorFactory( new CountAggregatorFactory("a0"), - NOT(SELECTOR("dim1", "1", null)) + not(selector("dim1", "1", null)) ), new LongSumAggregatorFactory("a1", "cnt") )) - .setPostAggregatorSpecs(ImmutableList.of(EXPRESSION_POST_AGG("p0", "(\"a0\" + \"a1\")"))) + .setPostAggregatorSpecs(ImmutableList.of(expressionPostAgg("p0", "(\"a0\" + \"a1\")"))) .setContext(QUERY_CONTEXT_DEFAULT) .build() ), @@ -2633,19 +2634,19 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) .aggregators( - AGGS( + aggregators( new FilteredAggregatorFactory( new CountAggregatorFactory("a0"), - NOT(SELECTOR("dim1", "1", null)) + not(selector("dim1", "1", null)) ), new FilteredAggregatorFactory( new CountAggregatorFactory("a1"), - AND( - NOT(SELECTOR("dim2", null, null)), - NOT(SELECTOR("dim1", "1", null)) + and( + not(selector("dim2", null, null)), + not(selector("dim1", "1", null)) ) ) ) @@ -2679,9 +2680,9 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) - .aggregators(AGGS( + .aggregators(aggregators( new LongSumAggregatorFactory("a0", null, "(\"cnt\" * 3)", macroTable), new LongSumAggregatorFactory("a1", "cnt"), new DoubleSumAggregatorFactory("a2", "m1"), @@ -2689,8 +2690,8 @@ public class CalciteQueryTest extends BaseCalciteQueryTest new DoubleMaxAggregatorFactory("a4", null, "(strlen(\"dim2\") + log(\"m1\"))", macroTable) )) .postAggregators( - EXPRESSION_POST_AGG("p0", "log((\"a1\" + \"a2\"))"), - EXPRESSION_POST_AGG("p1", 
"(\"a1\" % 4)") + expressionPostAgg("p0", "log((\"a1\" + \"a2\"))"), + expressionPostAgg("p1", "(\"a1\" % 4)") ) .context(TIMESERIES_CONTEXT_DEFAULT) .build() @@ -2715,14 +2716,14 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) .setVirtualColumns( - EXPRESSION_VIRTUAL_COLUMN("d0:v", "(floor((\"m1\" / 2)) * 2)", ValueType.FLOAT) + expressionVirtualColumn("d0:v", "(floor((\"m1\" / 2)) * 2)", ValueType.FLOAT) ) - .setDimFilter(EXPRESSION_FILTER("((floor((\"m1\" / 2)) * 2) > -1)")) - .setDimensions(DIMS(new DefaultDimensionSpec("d0:v", "d0", ValueType.FLOAT))) - .setAggregatorSpecs(AGGS(new CountAggregatorFactory("a0"))) + .setDimFilter(expressionFilter("((floor((\"m1\" / 2)) * 2) > -1)")) + .setDimensions(dimensionSpec(new DefaultDimensionSpec("d0:v", "d0", ValueType.FLOAT))) + .setAggregatorSpecs(aggregators(new CountAggregatorFactory("a0"))) .setLimitSpec( new DefaultLimitSpec( ImmutableList.of( @@ -2761,16 +2762,16 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) .setVirtualColumns( - EXPRESSION_VIRTUAL_COLUMN("d0:v", "((CAST(\"m1\", 'LONG') / 2) * 2)", ValueType.LONG) + expressionVirtualColumn("d0:v", "((CAST(\"m1\", 'LONG') / 2) * 2)", ValueType.LONG) ) .setDimFilter( - EXPRESSION_FILTER("(((CAST(\"m1\", 'LONG') / 2) * 2) > -1)") + expressionFilter("(((CAST(\"m1\", 'LONG') / 2) * 2) > -1)") ) - .setDimensions(DIMS(new DefaultDimensionSpec("d0:v", "d0", ValueType.LONG))) - .setAggregatorSpecs(AGGS(new CountAggregatorFactory("a0"))) + .setDimensions(dimensionSpec(new DefaultDimensionSpec("d0:v", "d0", ValueType.LONG))) + .setAggregatorSpecs(aggregators(new CountAggregatorFactory("a0"))) .setLimitSpec( new DefaultLimitSpec( ImmutableList.of( @@ -2809,20 +2810,20 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) .setVirtualColumns( - EXPRESSION_VIRTUAL_COLUMN( + expressionVirtualColumn( "d0:v", "(floor((CAST(\"dim1\", 'DOUBLE') / 2)) * 2)", ValueType.FLOAT ) ) .setDimFilter( - EXPRESSION_FILTER("((floor((CAST(\"dim1\", 'DOUBLE') / 2)) * 2) > -1)") + expressionFilter("((floor((CAST(\"dim1\", 'DOUBLE') / 2)) * 2) > -1)") ) - .setDimensions(DIMS(new DefaultDimensionSpec("d0:v", "d0", ValueType.FLOAT))) - .setAggregatorSpecs(AGGS(new CountAggregatorFactory("a0"))) + .setDimensions(dimensionSpec(new DefaultDimensionSpec("d0:v", "d0", ValueType.FLOAT))) + .setAggregatorSpecs(aggregators(new CountAggregatorFactory("a0"))) .setLimitSpec( new DefaultLimitSpec( ImmutableList.of( @@ -2860,12 +2861,12 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) - .setDimensions(DIMS(new DefaultDimensionSpec("dim1", "d0"))) + .setDimensions(dimensionSpec(new DefaultDimensionSpec("dim1", "d0"))) .setDimFilter(new 
InDimFilter("dim1", ImmutableList.of("abc", "def", "ghi"), null)) .setAggregatorSpecs( - AGGS( + aggregators( new CountAggregatorFactory("a0") ) ) @@ -2899,12 +2900,12 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) - .setDimensions(DIMS(new DefaultDimensionSpec("dim1", "d0"))) + .setDimensions(dimensionSpec(new DefaultDimensionSpec("dim1", "d0"))) .setDimFilter(new InDimFilter("dim1", elements, null)) .setAggregatorSpecs( - AGGS( + aggregators( new CountAggregatorFactory("a0") ) ) @@ -2926,18 +2927,18 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) .filters( - AND( - SELECTOR("dim2", "a", null), - OR( - BOUND("dim1", "a", null, true, false, null, StringComparators.LEXICOGRAPHIC), - NOT(SELECTOR("dim1", null, null)) + and( + selector("dim2", "a", null), + or( + bound("dim1", "a", null, true, false, null, StringComparators.LEXICOGRAPHIC), + not(selector("dim1", null, null)) ) ) ) - .aggregators(AGGS(new CountAggregatorFactory("a0"))) + .aggregators(aggregators(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build() ), @@ -2965,10 +2966,10 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) - .filters(BOUND("m1", "2.5", "3.5", true, true, null, StringComparators.NUMERIC)) - .aggregators(AGGS(new CountAggregatorFactory("a0"))) + .filters(bound("m1", "2.5", "3.5", true, true, null, StringComparators.NUMERIC)) + .aggregators(aggregators(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build() ), @@ -2986,10 +2987,10 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) - .filters(BOUND("dim1", "a", "b", false, true, null, StringComparators.LEXICOGRAPHIC)) - .aggregators(AGGS(new CountAggregatorFactory("a0"))) + .filters(bound("dim1", "a", "b", false, true, null, StringComparators.LEXICOGRAPHIC)) + .aggregators(aggregators(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build() ), @@ -3007,10 +3008,10 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) - .filters(SELECTOR("dim1", "abc", null)) - .aggregators(AGGS(new CountAggregatorFactory("a0"))) + .filters(selector("dim1", "abc", null)) + .aggregators(aggregators(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build() ), @@ -3028,10 +3029,10 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - 
.intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) - .filters(NUMERIC_SELECTOR("dim1", "2", null)) - .aggregators(AGGS(new CountAggregatorFactory("a0"))) + .filters(numericSelector("dim1", "2", null)) + .aggregators(aggregators(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build() ), @@ -3050,9 +3051,9 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Intervals.of("2000-01-01/2001-01-01"))) + .intervals(querySegmentSpec(Intervals.of("2000-01-01/2001-01-01"))) .granularity(Granularities.ALL) - .aggregators(AGGS(new CountAggregatorFactory("a0"))) + .aggregators(aggregators(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build() ), @@ -3078,9 +3079,9 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Intervals.of("2000/2001"), Intervals.of("2010/2011"))) + .intervals(querySegmentSpec(Intervals.of("2000/2001"), Intervals.of("2010/2011"))) .granularity(Granularities.ALL) - .aggregators(AGGS(new CountAggregatorFactory("a0"))) + .aggregators(aggregators(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build() ), @@ -3101,13 +3102,13 @@ public class CalciteQueryTest extends BaseCalciteQueryTest Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) .intervals( - QSS( + querySegmentSpec( Intervals.of("2000-01-01T00:00:00.111/2000-01-01T00:00:00.112"), Intervals.of("2000-01-01T00:00:00.888/2000-01-02T00:00:00.222") ) ) .granularity(Granularities.ALL) - .aggregators(AGGS(new CountAggregatorFactory("a0"))) + .aggregators(aggregators(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build() ), @@ -3131,14 +3132,14 @@ public class CalciteQueryTest extends BaseCalciteQueryTest Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) .intervals( - QSS( + querySegmentSpec( Intervals.of("2000-01-01/2001-01-01"), Intervals.of("2001-02-01/2001-02-02"), Intervals.of("2001-03-01/2001-03-02T00:00:00.001") ) ) .granularity(Granularities.ALL) - .aggregators(AGGS(new CountAggregatorFactory("a0"))) + .aggregators(aggregators(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build() ), @@ -3176,9 +3177,9 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Intervals.of("2000-01-01/2000-01-01T00:00:00.001"))) + .intervals(querySegmentSpec(Intervals.of("2000-01-01/2000-01-01T00:00:00.001"))) .granularity(Granularities.ALL) - .aggregators(AGGS(new CountAggregatorFactory("a0"))) + .aggregators(aggregators(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build() ), @@ -3198,13 +3199,13 @@ public class CalciteQueryTest extends BaseCalciteQueryTest Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) .intervals( - QSS( + querySegmentSpec( Intervals.of("2000-01-01/2000-01-01T00:00:00.001"), Intervals.of("2000-01-02/2000-01-02T00:00:00.001") ) ) .granularity(Granularities.ALL) - .aggregators(AGGS(new CountAggregatorFactory("a0"))) + .aggregators(aggregators(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build() ), @@ -3230,21 +3231,21 @@ public class CalciteQueryTest extends 
BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Intervals.of("2000/2001"), Intervals.of("2002-05-01/2003-05-01"))) + .intervals(querySegmentSpec(Intervals.of("2000/2001"), Intervals.of("2002-05-01/2003-05-01"))) .granularity(Granularities.ALL) .filters( - AND( - SELECTOR("dim2", "a", null), - OR( - TIME_BOUND("2000/2001"), - AND( - SELECTOR("dim1", "abc", null), - TIME_BOUND("2002-05-01/2003-05-01") + and( + selector("dim2", "a", null), + or( + timeBound("2000/2001"), + and( + selector("dim1", "abc", null), + timeBound("2002-05-01/2003-05-01") ) ) ) ) - .aggregators(AGGS(new CountAggregatorFactory("a0"))) + .aggregators(aggregators(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build() ), @@ -3271,21 +3272,21 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .filters( - OR( - NOT(SELECTOR("dim2", "a", null)), - AND( - NOT(TIME_BOUND("2000/2001")), - NOT(AND( - SELECTOR("dim1", "abc", null), - TIME_BOUND("2002-05-01/2003-05-01") + or( + not(selector("dim2", "a", null)), + and( + not(timeBound("2000/2001")), + not(and( + selector("dim1", "abc", null), + timeBound("2002-05-01/2003-05-01") )) ) ) ) .granularity(Granularities.ALL) - .aggregators(AGGS(new CountAggregatorFactory("a0"))) + .aggregators(aggregators(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build() ), @@ -3307,15 +3308,15 @@ public class CalciteQueryTest extends BaseCalciteQueryTest Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) .intervals( - QSS( + querySegmentSpec( new Interval(DateTimes.MIN, DateTimes.of("2000")), Intervals.of("2001/2003"), new Interval(DateTimes.of("2004"), DateTimes.MAX) ) ) - .filters(NOT(SELECTOR("dim1", "xxx", null))) + .filters(not(selector("dim1", "xxx", null))) .granularity(Granularities.ALL) - .aggregators(AGGS(new CountAggregatorFactory("a0"))) + .aggregators(aggregators(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build() ), @@ -3335,10 +3336,10 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Intervals.of("2000-01-01/2001-01-01"))) - .filters(NOT(SELECTOR("dim2", "a", null))) + .intervals(querySegmentSpec(Intervals.of("2000-01-01/2001-01-01"))) + .filters(not(selector("dim2", "a", null))) .granularity(Granularities.ALL) - .aggregators(AGGS(new CountAggregatorFactory("a0"))) + .aggregators(aggregators(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build() ), @@ -3358,14 +3359,14 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .filters( - OR( - NOT(SELECTOR("dim2", "a", null)), - BOUND( + or( + not(selector("dim2", "a", null)), + bound( "__time", - String.valueOf(T("2000-01-01")), - String.valueOf(T("2000-12-31T23:59:59.999")), + String.valueOf(t("2000-01-01")), + String.valueOf(t("2000-12-31T23:59:59.999")), false, false, null, @@ -3374,7 +3375,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ) ) .granularity(Granularities.ALL) - .aggregators(AGGS(new 
CountAggregatorFactory("a0"))) + .aggregators(aggregators(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build() ), @@ -3394,10 +3395,10 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) .filters( - BOUND( + bound( "cnt", String.valueOf(DateTimes.of("1970-01-01").getMillis()), String.valueOf(DateTimes.of("1970-01-02").getMillis()), @@ -3407,7 +3408,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest StringComparators.NUMERIC ) ) - .aggregators(AGGS(new CountAggregatorFactory("a0"))) + .aggregators(aggregators(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build() ), @@ -3427,10 +3428,10 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) .filters( - BOUND( + bound( "cnt", String.valueOf(DateTimes.of("1970-01-01").getMillis()), String.valueOf(DateTimes.of("1970-01-02").getMillis()), @@ -3440,7 +3441,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest StringComparators.NUMERIC ) ) - .aggregators(AGGS(new CountAggregatorFactory("a0"))) + .aggregators(aggregators(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build() ), @@ -3460,10 +3461,10 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) .filters( - BOUND( + bound( "cnt", String.valueOf(DateTimes.of("1970-01-01").getMillis()), String.valueOf(DateTimes.of("1970-01-02").getMillis()), @@ -3473,7 +3474,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest StringComparators.NUMERIC ) ) - .aggregators(AGGS(new CountAggregatorFactory("a0"))) + .aggregators(aggregators(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build() ), @@ -3491,9 +3492,9 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) - .aggregators(AGGS( + .aggregators(aggregators( new LongSumAggregatorFactory( "a0", null, @@ -3518,9 +3519,9 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) - .aggregators(AGGS( + .aggregators(aggregators( new LongSumAggregatorFactory( "a0", null, @@ -3554,13 +3555,13 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( new GroupByQuery.Builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) .setVirtualColumns( - EXPRESSION_VIRTUAL_COLUMN("d0:v", "timestamp_floor(\"cnt\",'P1Y',null,'UTC')", ValueType.LONG) + expression_Virtual_Column("d0:v", 
"timestamp_floor(\"cnt\",'P1Y',null,'UTC')", ValueType.LONG) ) .setDimFilter( - BOUND( + bound( "cnt", String.valueOf(DateTimes.of("1970-01-01").getMillis()), String.valueOf(DateTimes.of("1970-01-02").getMillis()), @@ -3570,13 +3571,13 @@ public class CalciteQueryTest extends BaseCalciteQueryTest StringComparators.NUMERIC ) ) - .setDimensions(DIMS(new DefaultDimensionSpec("d0:v", "d0", ValueType.LONG))) - .setAggregatorSpecs(AGGS(new CountAggregatorFactory("a0"))) + .setDimensions(dimensionSpec(new DefaultDimensionSpec("d0:v", "d0", ValueType.LONG))) + .setAggregatorSpecs(aggregators(new CountAggregatorFactory("a0"))) .setContext(QUERY_CONTEXT_DEFAULT) .build() ), ImmutableList.of( - new Object[]{T("1970-01-01"), 6L} + new Object[]{t("1970-01-01"), 6L} ) ); } @@ -3589,20 +3590,20 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) - .setDimensions(DIMS(new DefaultDimensionSpec("dim1", "d0"))) + .setDimensions(dimensionSpec(new DefaultDimensionSpec("dim1", "d0"))) .setDimFilter( - OR( - SELECTOR( + or( + selector( "dim1", "e", - CASCADE( + cascade( new SubstringDimExtractionFn(1, null), new SubstringDimExtractionFn(0, 1) ) ), - SELECTOR("dim2", "a", null) + selector("dim2", "a", null) ) ) .setContext(QUERY_CONTEXT_DEFAULT) @@ -3625,13 +3626,13 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) - .setDimensions(DIMS(new DefaultDimensionSpec("dim1", "d0"))) + .setDimensions(dimensionSpec(new DefaultDimensionSpec("dim1", "d0"))) .setDimFilter( - OR( - EXPRESSION_FILTER("(strlen(\"dim1\") == 3)"), - EXPRESSION_FILTER("(CAST(strlen(\"dim1\"), 'STRING') == 3)") + or( + expressionFilter("(strlen(\"dim1\") == 3)"), + expressionFilter("(CAST(strlen(\"dim1\"), 'STRING') == 3)") ) ) .setContext(QUERY_CONTEXT_DEFAULT) @@ -3654,7 +3655,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( new TopNQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) .dimension(new DefaultDimensionSpec("dim2", "d0")) .metric(new DimensionTopNMetricSpec(null, StringComparators.LEXICOGRAPHIC)) @@ -3685,7 +3686,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( new TopNQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) .dimension(new DefaultDimensionSpec("dim2", "d0")) .metric(new DimensionTopNMetricSpec(null, StringComparators.LEXICOGRAPHIC)) @@ -3716,7 +3717,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( new TopNQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) .dimension(new DefaultDimensionSpec("dim2", "d0")) .metric(new DimensionTopNMetricSpec(null, StringComparators.LEXICOGRAPHIC)) @@ -3759,7 +3760,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( new TopNQueryBuilder() 
.dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) .dimension(new DefaultDimensionSpec("dim2", "d0")) .metric(new InvertedTopNMetricSpec(new DimensionTopNMetricSpec(null, StringComparators.LEXICOGRAPHIC))) @@ -3790,15 +3791,15 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) .aggregators( - AGGS( + aggregators( new LongSumAggregatorFactory("a0", "cnt"), new CardinalityAggregatorFactory( "a1", null, - DIMS(new DefaultDimensionSpec("dim2", null)), + dimensionSpec(new DefaultDimensionSpec("dim2", null)), false, true ), @@ -3826,10 +3827,10 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) .aggregators( - AGGS( + aggregators( new FilteredAggregatorFactory( new CardinalityAggregatorFactory( "a0", @@ -3838,7 +3839,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest false, true ), - BOUND("m1", "4", null, false, false, null, StringComparators.NUMERIC) + bound("m1", "4", null, false, false, null, StringComparators.NUMERIC) ), new FilteredAggregatorFactory( new CardinalityAggregatorFactory( @@ -3848,11 +3849,11 @@ public class CalciteQueryTest extends BaseCalciteQueryTest false, true ), - BOUND("m1", "4", null, false, false, null, StringComparators.NUMERIC) + bound("m1", "4", null, false, false, null, StringComparators.NUMERIC) ), new FilteredAggregatorFactory( new HyperUniquesAggregatorFactory("a2", "unique_dim1", false, true), - BOUND("m1", "4", null, false, false, null, StringComparators.NUMERIC) + bound("m1", "4", null, false, false, null, StringComparators.NUMERIC) ) ) ) @@ -3880,19 +3881,19 @@ public class CalciteQueryTest extends BaseCalciteQueryTest new QueryDataSource( GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) - .setDimensions(DIMS(new DefaultDimensionSpec("dim2", "d0"))) + .setDimensions(dimensionSpec(new DefaultDimensionSpec("dim2", "d0"))) .setContext(QUERY_CONTEXT_DEFAULT) .build() ) ) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) - .setAggregatorSpecs(AGGS( + .setAggregatorSpecs(aggregators( new FilteredAggregatorFactory( new CountAggregatorFactory("a0"), - NOT(SELECTOR("d0", null, null)) + not(selector("d0", null, null)) ) )) .setContext(QUERY_CONTEXT_DEFAULT) @@ -3916,14 +3917,14 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) .aggregators( - AGGS( + aggregators( new CardinalityAggregatorFactory( "a0", null, - DIMS(new DefaultDimensionSpec("dim2", null)), + dimensionSpec(new DefaultDimensionSpec("dim2", null)), false, true ) @@ -3953,25 +3954,25 @@ public class CalciteQueryTest extends BaseCalciteQueryTest new QueryDataSource( 
GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) - .setDimensions(DIMS( + .setDimensions(dimensionSpec( new DefaultDimensionSpec("dim2", "d0"), new DefaultDimensionSpec("dim1", "d1") )) - .setAggregatorSpecs(AGGS(new LongSumAggregatorFactory("a0", "cnt"))) + .setAggregatorSpecs(aggregators(new LongSumAggregatorFactory("a0", "cnt"))) .setContext(QUERY_CONTEXT_DEFAULT) .build() ) ) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) - .setDimensions(DIMS(new DefaultDimensionSpec("d0", "_d0"))) - .setAggregatorSpecs(AGGS( + .setDimensions(dimensionSpec(new DefaultDimensionSpec("d0", "_d0"))) + .setAggregatorSpecs(aggregators( new LongSumAggregatorFactory("_a0", "a0"), new FilteredAggregatorFactory( new CountAggregatorFactory("_a1"), - NOT(SELECTOR("d1", null, null)) + not(selector("d1", null, null)) ) )) .setContext(QUERY_CONTEXT_DEFAULT) @@ -4007,18 +4008,18 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) .virtualColumns( - EXPRESSION_VIRTUAL_COLUMN("a4:v", "concat(substring(\"dim2\", 0, 1),'x')", ValueType.STRING) + expressionVirtualColumn("a4:v", "concat(substring(\"dim2\", 0, 1),'x')", ValueType.STRING) ) .aggregators( - AGGS( + aggregators( new LongSumAggregatorFactory("a0", "cnt"), new CardinalityAggregatorFactory( "a1", null, - DIMS(new DefaultDimensionSpec("dim2", "dim2")), + dimensionSpec(new DefaultDimensionSpec("dim2", "dim2")), false, true ), @@ -4026,16 +4027,16 @@ public class CalciteQueryTest extends BaseCalciteQueryTest new CardinalityAggregatorFactory( "a2", null, - DIMS(new DefaultDimensionSpec("dim2", "dim2")), + dimensionSpec(new DefaultDimensionSpec("dim2", "dim2")), false, true ), - NOT(SELECTOR("dim2", "", null)) + not(selector("dim2", "", null)) ), new CardinalityAggregatorFactory( "a3", null, - DIMS( + dimensionSpec( new ExtractionDimensionSpec( "dim2", "dim2", @@ -4049,7 +4050,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest new CardinalityAggregatorFactory( "a4", null, - DIMS(new DefaultDimensionSpec("a4:v", "a4:v", ValueType.STRING)), + dimensionSpec(new DefaultDimensionSpec("a4:v", "a4:v", ValueType.STRING)), false, true ), @@ -4093,31 +4094,31 @@ public class CalciteQueryTest extends BaseCalciteQueryTest .setDataSource( GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) - .setDimensions(DIMS( + .setDimensions(dimensionSpec( new DefaultDimensionSpec("m2", "d0", ValueType.DOUBLE), new DefaultDimensionSpec("dim1", "d1") )) .setDimFilter(new SelectorDimFilter("m1", "5.0", null)) - .setAggregatorSpecs(AGGS(new LongMaxAggregatorFactory("a0", "__time"))) + .setAggregatorSpecs(aggregators(new LongMaxAggregatorFactory("a0", "__time"))) .setContext(QUERY_CONTEXT_DEFAULT) .build() ) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) .setVirtualColumns( - EXPRESSION_VIRTUAL_COLUMN( + expressionVirtualColumn( "_d0:v", "timestamp_floor(\"a0\",'PT1H',null,'UTC')", ValueType.LONG 
) ) - .setDimensions(DIMS( + .setDimensions(dimensionSpec( new DefaultDimensionSpec("_d0:v", "_d0", ValueType.LONG), new DefaultDimensionSpec("d1", "_d1", ValueType.STRING) )) - .setAggregatorSpecs(AGGS( + .setAggregatorSpecs(aggregators( new CountAggregatorFactory("_a0") )) .setContext(QUERY_CONTEXT_DEFAULT) @@ -4151,26 +4152,26 @@ public class CalciteQueryTest extends BaseCalciteQueryTest .setDataSource( GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) - .setDimensions(DIMS( + .setDimensions(dimensionSpec( new DefaultDimensionSpec("dim1", "d0"), new DefaultDimensionSpec("dim2", "d1") )) - .setAggregatorSpecs(AGGS(new CountAggregatorFactory("a0"))) + .setAggregatorSpecs(aggregators(new CountAggregatorFactory("a0"))) .setContext(QUERY_CONTEXT_DEFAULT) .build() ) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) - .setDimensions(DIMS(new DefaultDimensionSpec("d1", "_d0"))) - .setAggregatorSpecs(AGGS(new LongSumAggregatorFactory("_a0", "a0"))) + .setDimensions(dimensionSpec(new DefaultDimensionSpec("d1", "_d0"))) + .setAggregatorSpecs(aggregators(new LongSumAggregatorFactory("_a0", "a0"))) .setContext(QUERY_CONTEXT_DEFAULT) .build() ) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) - .setAggregatorSpecs(AGGS( + .setAggregatorSpecs(aggregators( new LongSumAggregatorFactory("a0", "_a0"), new CountAggregatorFactory("a1") )) @@ -4230,17 +4231,17 @@ public class CalciteQueryTest extends BaseCalciteQueryTest new QueryDataSource( GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) - .setDimensions(DIMS(new DefaultDimensionSpec("dim2", "d0"))) - .setAggregatorSpecs(AGGS(new LongSumAggregatorFactory("a0", "cnt"))) + .setDimensions(dimensionSpec(new DefaultDimensionSpec("dim2", "d0"))) + .setAggregatorSpecs(aggregators(new LongSumAggregatorFactory("a0", "cnt"))) .setContext(QUERY_CONTEXT_DEFAULT) .build() ) ) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) - .setAggregatorSpecs(AGGS( + .setAggregatorSpecs(aggregators( new LongSumAggregatorFactory("_a0", "a0"), new CountAggregatorFactory("_a1") )) @@ -4274,24 +4275,28 @@ public class CalciteQueryTest extends BaseCalciteQueryTest new QueryDataSource( GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) .setVirtualColumns( - EXPRESSION_VIRTUAL_COLUMN( + expressionVirtualColumn( "d0:v", "timestamp_floor(\"__time\",'P1D',null,'UTC')", ValueType.LONG ) ) - .setDimensions(DIMS(new DefaultDimensionSpec("d0:v", "d0", ValueType.LONG))) - .setAggregatorSpecs(AGGS(new CountAggregatorFactory("a0"))) + .setDimensions(dimensionSpec(new DefaultDimensionSpec( + "d0:v", + "d0", + ValueType.LONG + ))) + .setAggregatorSpecs(aggregators(new CountAggregatorFactory("a0"))) .setContext(QUERY_CONTEXT_DEFAULT) .build() ) ) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) - .setAggregatorSpecs(AGGS( + 
.setAggregatorSpecs(aggregators( new LongMaxAggregatorFactory("_a0", "a0"), new LongMinAggregatorFactory("_a1", "a0"), new LongSumAggregatorFactory("_a2:sum", "a0"), @@ -4309,7 +4314,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest new FieldAccessPostAggregator(null, "_a2:count") ) ), - EXPRESSION_POST_AGG("s0", "timestamp_extract(\"_a3\",'EPOCH','UTC')") + expressionPostAgg("s0", "timestamp_extract(\"_a3\",'EPOCH','UTC')") ) ) .setLimit(1) @@ -4333,22 +4338,30 @@ public class CalciteQueryTest extends BaseCalciteQueryTest new QueryDataSource( GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) .setVirtualColumns( - EXPRESSION_VIRTUAL_COLUMN( + expressionVirtualColumn( "d0:v", "timestamp_floor(\"__time\",'P1D',null,'UTC')", ValueType.LONG ) ) - .setDimensions(DIMS(new DefaultDimensionSpec("d0:v", "d0", ValueType.LONG))) + .setDimensions(dimensionSpec(new DefaultDimensionSpec( + "d0:v", + "d0", + ValueType.LONG + ))) .setAggregatorSpecs( - AGGS( + aggregators( new CardinalityAggregatorFactory( "a0:a", null, - DIMS(new DefaultDimensionSpec("cnt", "cnt", ValueType.LONG)), + dimensionSpec(new DefaultDimensionSpec( + "cnt", + "cnt", + ValueType.LONG + )), false, true ) @@ -4363,9 +4376,9 @@ public class CalciteQueryTest extends BaseCalciteQueryTest .build() ) ) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) - .setAggregatorSpecs(AGGS( + .setAggregatorSpecs(aggregators( new LongSumAggregatorFactory("_a0:sum", "a0"), new CountAggregatorFactory("_a0:count") )) @@ -4392,8 +4405,8 @@ public class CalciteQueryTest extends BaseCalciteQueryTest public void testTopNFilterJoin() throws Exception { DimFilter filter = NullHandling.replaceWithDefault() ? - IN("dim2", Arrays.asList(null, "a"), null) - : SELECTOR("dim2", "a", null); + in("dim2", Arrays.asList(null, "a"), null) + : selector("dim2", "a", null); // Filters on top N values of some dimension by using an inner join. 
testQuery( "SELECT t1.dim1, SUM(t1.cnt)\n" @@ -4412,21 +4425,21 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( new TopNQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) .dimension(new DefaultDimensionSpec("dim2", "d0")) - .aggregators(AGGS(new LongSumAggregatorFactory("a0", "cnt"))) + .aggregators(aggregators(new LongSumAggregatorFactory("a0", "cnt"))) .metric(new NumericTopNMetricSpec("a0")) .threshold(2) .context(QUERY_CONTEXT_DEFAULT) .build(), GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) .setDimFilter(filter) - .setDimensions(DIMS(new DefaultDimensionSpec("dim1", "d0"))) - .setAggregatorSpecs(AGGS(new LongSumAggregatorFactory("a0", "cnt"))) + .setDimensions(dimensionSpec(new DefaultDimensionSpec("dim1", "d0"))) + .setAggregatorSpecs(aggregators(new LongSumAggregatorFactory("a0", "cnt"))) .setLimitSpec( new DefaultLimitSpec( ImmutableList.of( @@ -4479,21 +4492,21 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( new TopNQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) .dimension(new DefaultDimensionSpec("dim2", "d0")) - .aggregators(AGGS(new LongSumAggregatorFactory("a0", "cnt"))) + .aggregators(aggregators(new LongSumAggregatorFactory("a0", "cnt"))) .metric(new NumericTopNMetricSpec("a0")) .threshold(2) .context(QUERY_CONTEXT_DEFAULT) .build(), GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) - .setDimFilter(IN("dim2", ImmutableList.of("", "a"), null)) - .setDimensions(DIMS(new DefaultDimensionSpec("dim1", "d0"))) - .setAggregatorSpecs(AGGS(new LongSumAggregatorFactory("a0", "cnt"))) + .setDimFilter(in("dim2", ImmutableList.of("", "a"), null)) + .setDimensions(dimensionSpec(new DefaultDimensionSpec("dim1", "d0"))) + .setAggregatorSpecs(aggregators(new LongSumAggregatorFactory("a0", "cnt"))) .setLimitSpec( new DefaultLimitSpec( ImmutableList.of( @@ -4541,10 +4554,10 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) - .setDimensions(DIMS(new DefaultDimensionSpec("dim1", "d0"))) - .setAggregatorSpecs(AGGS(new LongSumAggregatorFactory("a0", "cnt"))) + .setDimensions(dimensionSpec(new DefaultDimensionSpec("dim1", "d0"))) + .setAggregatorSpecs(aggregators(new LongSumAggregatorFactory("a0", "cnt"))) .setLimitSpec( new DefaultLimitSpec( ImmutableList.of( @@ -4586,10 +4599,10 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) - .setDimFilter(NOT(SELECTOR("dim1", "", null))) - .setDimensions(DIMS(new ExtractionDimensionSpec( + .setDimFilter(not(selector("dim1", "", null))) + .setDimensions(dimensionSpec(new ExtractionDimensionSpec( 
"dim1", "d0", new SubstringDimExtractionFn(0, 1) @@ -4601,21 +4614,21 @@ public class CalciteQueryTest extends BaseCalciteQueryTest new QueryDataSource( GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Intervals.of("2000-01-01/2002-01-01"))) + .setInterval(querySegmentSpec(Intervals.of("2000-01-01/2002-01-01"))) .setGranularity(Granularities.ALL) - .setDimFilter(IN( + .setDimFilter(in( "dim2", ImmutableList.of("1", "2", "a", "d"), new SubstringDimExtractionFn(0, 1) )) - .setDimensions(DIMS(new DefaultDimensionSpec("dim2", "d0"))) + .setDimensions(dimensionSpec(new DefaultDimensionSpec("dim2", "d0"))) .setContext(QUERY_CONTEXT_DEFAULT) .build() ) ) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) - .setAggregatorSpecs(AGGS( + .setAggregatorSpecs(aggregators( new CountAggregatorFactory("a0") )) .setContext(QUERY_CONTEXT_DEFAULT) @@ -4682,18 +4695,18 @@ public class CalciteQueryTest extends BaseCalciteQueryTest new QueryDataSource( GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) - .setDimFilter(NOT(SELECTOR("dim2", "", null))) + .setInterval(querySegmentSpec(Filtration.eternity())) + .setDimFilter(not(selector("dim2", "", null))) .setGranularity(Granularities.ALL) - .setDimensions(DIMS(new DefaultDimensionSpec("dim2", "d0"))) - .setAggregatorSpecs(AGGS(new LongSumAggregatorFactory("a0", "cnt"))) + .setDimensions(dimensionSpec(new DefaultDimensionSpec("dim2", "d0"))) + .setAggregatorSpecs(aggregators(new LongSumAggregatorFactory("a0", "cnt"))) .setContext(QUERY_CONTEXT_DEFAULT) .build() ) ) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) - .setAggregatorSpecs(AGGS( + .setAggregatorSpecs(aggregators( new LongSumAggregatorFactory("_a0", "a0"), new CountAggregatorFactory("_a1") )) @@ -4721,18 +4734,18 @@ public class CalciteQueryTest extends BaseCalciteQueryTest new QueryDataSource( GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) - .setDimFilter(NOT(SELECTOR("dim2", null, null))) + .setInterval(querySegmentSpec(Filtration.eternity())) + .setDimFilter(not(selector("dim2", null, null))) .setGranularity(Granularities.ALL) - .setDimensions(DIMS(new DefaultDimensionSpec("dim2", "d0"))) - .setAggregatorSpecs(AGGS(new LongSumAggregatorFactory("a0", "cnt"))) + .setDimensions(dimensionSpec(new DefaultDimensionSpec("dim2", "d0"))) + .setAggregatorSpecs(aggregators(new LongSumAggregatorFactory("a0", "cnt"))) .setContext(QUERY_CONTEXT_DEFAULT) .build() ) ) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) - .setAggregatorSpecs(AGGS( + .setAggregatorSpecs(aggregators( new LongSumAggregatorFactory("_a0", "a0"), new CountAggregatorFactory("_a1") )) @@ -4764,19 +4777,19 @@ public class CalciteQueryTest extends BaseCalciteQueryTest new QueryDataSource( GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) - .setDimensions(DIMS(new DefaultDimensionSpec("dim2", "d0"))) - .setAggregatorSpecs(AGGS(new LongSumAggregatorFactory("a0", "cnt"))) + .setDimensions(dimensionSpec(new DefaultDimensionSpec("dim2", "d0"))) + .setAggregatorSpecs(aggregators(new 
LongSumAggregatorFactory("a0", "cnt"))) .setLimit(1) .setContext(QUERY_CONTEXT_DEFAULT) .build() ) ) - .setDimFilter(BOUND("a0", "0", null, true, false, null, StringComparators.NUMERIC)) - .setInterval(QSS(Filtration.eternity())) + .setDimFilter(bound("a0", "0", null, true, false, null, StringComparators.NUMERIC)) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) - .setAggregatorSpecs(AGGS( + .setAggregatorSpecs(aggregators( new LongSumAggregatorFactory("_a0", "a0"), new CountAggregatorFactory("_a1") )) @@ -4808,29 +4821,29 @@ public class CalciteQueryTest extends BaseCalciteQueryTest new QueryDataSource( GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) - .setDimFilter(NOT(SELECTOR("dim1", "", null))) - .setDimensions(DIMS(new DefaultDimensionSpec("dim1", "d0"))) + .setDimFilter(not(selector("dim1", "", null))) + .setDimensions(dimensionSpec(new DefaultDimensionSpec("dim1", "d0"))) .setContext(QUERY_CONTEXT_DEFAULT) .build() ) ) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) - .setAggregatorSpecs(AGGS( + .setAggregatorSpecs(aggregators( new CountAggregatorFactory("a0"), new CardinalityAggregatorFactory( "a1", null, - DIMS(new DefaultDimensionSpec("d0", null)), + dimensionSpec(new DefaultDimensionSpec("d0", null)), false, true ) )) .setPostAggregatorSpecs( ImmutableList.of( - EXPRESSION_POST_AGG("p0", "((1 - (\"a1\" / \"a0\")) * 100)") + expresionPostAgg("p0", "((1 - (\"a1\" / \"a0\")) * 100)") ) ) .setContext(QUERY_CONTEXT_DEFAULT) @@ -4857,18 +4870,18 @@ public class CalciteQueryTest extends BaseCalciteQueryTest new QueryDataSource( GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) - .setDimensions(DIMS(new DefaultDimensionSpec("dim2", "d0"))) - .setAggregatorSpecs(AGGS(new LongSumAggregatorFactory("a0", "cnt"))) + .setDimensions(dimensionSpec(new DefaultDimensionSpec("dim2", "d0"))) + .setAggregatorSpecs(aggregators(new LongSumAggregatorFactory("a0", "cnt"))) .setContext(QUERY_CONTEXT_DEFAULT) .build() ) ) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) - .setDimensions(DIMS(new DefaultDimensionSpec("a0", "_d0"))) - .setAggregatorSpecs(AGGS( + .setDimensions(dimensionSpec(new DefaultDimensionSpec("a0", "_d0"))) + .setAggregatorSpecs(aggregators( new CountAggregatorFactory("_a0") )) .setContext(QUERY_CONTEXT_DEFAULT) @@ -4902,18 +4915,18 @@ public class CalciteQueryTest extends BaseCalciteQueryTest new QueryDataSource( GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) - .setDimensions(DIMS(new DefaultDimensionSpec("dim2", "d0"))) - .setAggregatorSpecs(AGGS(new LongSumAggregatorFactory("a0", "cnt"))) + .setDimensions(dimensionSpec(new DefaultDimensionSpec("dim2", "d0"))) + .setAggregatorSpecs(aggregators(new LongSumAggregatorFactory("a0", "cnt"))) .setContext(QUERY_CONTEXT_DEFAULT) .build() ) ) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) - 
.setDimensions(DIMS(new DefaultDimensionSpec("a0", "_d0"))) - .setAggregatorSpecs(AGGS( + .setDimensions(dimensionSpec(new DefaultDimensionSpec("a0", "_d0"))) + .setAggregatorSpecs(aggregators( new CountAggregatorFactory("_a0") )) .setLimitSpec( @@ -4956,25 +4969,25 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) .aggregators( - AGGS( + aggregators( new LongSumAggregatorFactory("a0", "cnt"), new CardinalityAggregatorFactory( "a1", null, - DIMS(new DefaultDimensionSpec("dim2", null)), + dimensionSpec(new DefaultDimensionSpec("dim2", null)), false, true ) ) ) .postAggregators( - EXPRESSION_POST_AGG("p0", "CAST(\"a1\", 'DOUBLE')"), - EXPRESSION_POST_AGG("p1", "(\"a0\" / \"a1\")"), - EXPRESSION_POST_AGG("p2", "((\"a0\" / \"a1\") + 3)"), - EXPRESSION_POST_AGG("p3", "((CAST(\"a0\", 'DOUBLE') / CAST(\"a1\", 'DOUBLE')) + 3)") + expressionPostAgg("p0", "CAST(\"a1\", 'DOUBLE')"), + expressionPostAgg("p1", "(\"a0\" / \"a1\")"), + expressionPostAgg("p2", "((\"a0\" / \"a1\") + 3)"), + expressionPostAgg("p3", "((CAST(\"a0\", 'DOUBLE') / CAST(\"a1\", 'DOUBLE')) + 3)") ) .context(TIMESERIES_CONTEXT_DEFAULT) .build() @@ -4993,15 +5006,15 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) - .filters(NOT(SELECTOR("dim1", "", null))) + .intervals(querySegmentSpec(Filtration.eternity())) + .filters(not(selector("dim1", "", null))) .granularity(Granularities.ALL) .aggregators( - AGGS( + aggregators( new CardinalityAggregatorFactory( "a0", null, - DIMS( + dimensionSpec( new ExtractionDimensionSpec( "dim1", null, @@ -5032,17 +5045,17 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) - .filters(NOT(SELECTOR("dim1", "", null))) + .intervals(querySegmentSpec(Filtration.eternity())) + .filters(not(selector("dim1", "", null))) .granularity(Granularities.ALL) - .virtualColumns(EXPRESSION_VIRTUAL_COLUMN("a0:v", "trim(\"dim1\",' ')", ValueType.STRING)) - .filters(EXPRESSION_FILTER("(trim(\"dim1\",' ') != '')")) + .virtualColumns(expressionVirtualColumn("a0:v", "trim(\"dim1\",' ')", ValueType.STRING)) + .filters(expressionFilter("(trim(\"dim1\",' ') != '')")) .aggregators( - AGGS( + aggregators( new CardinalityAggregatorFactory( "a0", null, - DIMS(new DefaultDimensionSpec("a0:v", "a0:v", ValueType.STRING)), + dimensionSpec(new DefaultDimensionSpec("a0:v", "a0:v", ValueType.STRING)), false, true ) @@ -5069,15 +5082,15 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) - .setVirtualColumns(EXPRESSION_VIRTUAL_COLUMN( + .setVirtualColumns(expressionVirtualColumn( "d0:v", "(((timestamp_extract(\"__time\",'MONTH','UTC') - 1) / 3) + 1)", ValueType.LONG )) - .setDimensions(DIMS(new DefaultDimensionSpec("d0:v", "d0", ValueType.LONG))) - .setAggregatorSpecs(AGGS(new CountAggregatorFactory("a0"))) + .setDimensions(dimensionSpec(new DefaultDimensionSpec("d0:v", "d0", ValueType.LONG))) + .setAggregatorSpecs(aggregators(new CountAggregatorFactory("a0"))) .setContext(QUERY_CONTEXT_DEFAULT) .build() ),
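// The quarter bucket above is computed with plain integer arithmetic rather than a native
// QUARTER granularity: assuming integer division in the expression language, January gives
// ((1 - 1) / 3) + 1 = 1 and December gives ((12 - 1) / 3) + 1 = 4, so the virtual column
// "d0:v" takes exactly the values 1 through 4 that EXTRACT(QUARTER FROM __time) requires.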
@@ -5100,17 +5113,17 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) .setDimFilter( - NOT(SELECTOR( + not(selector( "dim1", "x", new RegexDimExtractionFn("^(.)", 1, true, null) )) ) .setDimensions( - DIMS( + dimensionSpec( new ExtractionDimensionSpec( "dim1", "d0", @@ -5145,16 +5158,16 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) .setDimensions( - DIMS( + dimensionSpec( new DefaultDimensionSpec("dim2", "d0"), new DefaultDimensionSpec("dim1", "d1") ) ) .setAggregatorSpecs( - AGGS( + aggregators( new LongSumAggregatorFactory("a0", "cnt") ) ) @@ -5192,16 +5205,16 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) .setDimensions( - DIMS( + dimensionSpec( new DefaultDimensionSpec("dim1", "d0"), new DefaultDimensionSpec("dim2", "d1") ) ) .setAggregatorSpecs( - AGGS( + aggregators( new LongSumAggregatorFactory("a0", "cnt") ) ) @@ -5213,7 +5226,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest 4 ) ) - .setHavingSpec(HAVING(NUMERIC_SELECTOR("a0", "1", null))) + .setHavingSpec(having(numericSelector("a0", "1", null))) .setContext(QUERY_CONTEXT_DEFAULT) .build() ), @@ -5244,9 +5257,9 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Intervals.of("2000/P2M"))) + .intervals(querySegmentSpec(Intervals.of("2000/P2M"))) .granularity(Granularities.ALL) - .aggregators(AGGS(new CountAggregatorFactory("a0"))) + .aggregators(aggregators(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build() ), @@ -5267,9 +5280,9 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Intervals.of("2000-01-01T01:02/2002"))) + .intervals(querySegmentSpec(Intervals.of("2000-01-01T01:02/2002"))) .granularity(Granularities.ALL) - .aggregators(AGGS(new CountAggregatorFactory("a0"))) + .aggregators(aggregators(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build() ), @@ -5289,7 +5302,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest CalciteTests.REGULAR_USER_AUTH_RESULT, ImmutableList.of(), ImmutableList.of( - new Object[]{T("2000-01-01T00Z", LOS_ANGELES), D("1999-12-31"), D("2000-01-01")} + new Object[]{t("2000-01-01T00Z", LOS_ANGELES), d("1999-12-31"), d("2000-01-01")} ) ); } @@ -5306,9 +5319,9 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Intervals.of("2000-01-02T00Z/2002-01-01T08Z"))) + .intervals(querySegmentSpec(Intervals.of("2000-01-02T00Z/2002-01-01T08Z"))) .granularity(Granularities.ALL) -
.aggregators(AGGS(new CountAggregatorFactory("a0"))) + .aggregators(aggregators(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_LOS_ANGELES) .build() ), @@ -5326,9 +5339,9 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Intervals.of("2000-01-02/2002"))) + .intervals(querySegmentSpec(Intervals.of("2000-01-02/2002"))) .granularity(Granularities.ALL) - .aggregators(AGGS(new CountAggregatorFactory("a0"))) + .aggregators(aggregators(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build() ), @@ -5352,9 +5365,9 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Intervals.of("2000-01-02T00Z/2002-01-01T08Z"))) + .intervals(querySegmentSpec(Intervals.of("2000-01-02T00Z/2002-01-01T08Z"))) .granularity(Granularities.ALL) - .aggregators(AGGS(new CountAggregatorFactory("a0"))) + .aggregators(aggregators(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_LOS_ANGELES) .build() ), @@ -5374,12 +5387,12 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS( + .intervals(querySegmentSpec( new Interval(DateTimes.MIN, DateTimes.of("2001-01-01")), new Interval(DateTimes.of("2001-02-01"), DateTimes.MAX) )) .granularity(Granularities.ALL) - .aggregators(AGGS(new CountAggregatorFactory("a0"))) + .aggregators(aggregators(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build() ), @@ -5399,9 +5412,9 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(new Interval(DateTimes.MIN, DateTimes.of("2000-02-01")))) + .intervals(querySegmentSpec(new Interval(DateTimes.MIN, DateTimes.of("2000-02-01")))) .granularity(Granularities.ALL) - .aggregators(AGGS(new CountAggregatorFactory("a0"))) + .aggregators(aggregators(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build() ), @@ -5421,9 +5434,9 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(new Interval(DateTimes.MIN, DateTimes.of("2000-03-01")))) + .intervals(querySegmentSpec(new Interval(DateTimes.MIN, DateTimes.of("2000-03-01")))) .granularity(Granularities.ALL) - .aggregators(AGGS(new CountAggregatorFactory("a0"))) + .aggregators(aggregators(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build() ), @@ -5443,13 +5456,13 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) - .aggregators(AGGS(new CountAggregatorFactory("a0"))) + .aggregators(aggregators(new CountAggregatorFactory("a0"))) .filters( - AND( - EXPRESSION_FILTER("(timestamp_extract(\"__time\",'YEAR','UTC') == 2000)"), - EXPRESSION_FILTER("(timestamp_extract(\"__time\",'MONTH','UTC') == 1)") + and( + expressionFilter("(timestamp_extract(\"__time\",'YEAR','UTC') == 2000)"), + expressionFilter("(timestamp_extract(\"__time\",'MONTH','UTC') == 1)") ) ) 
.context(TIMESERIES_CONTEXT_DEFAULT) @@ -5471,16 +5484,16 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) - .aggregators(AGGS(new CountAggregatorFactory("a0"))) + .aggregators(aggregators(new CountAggregatorFactory("a0"))) .filters( - AND( - EXPRESSION_FILTER("(timestamp_extract(\"__time\",'YEAR','UTC') == 2000)"), - OR( - EXPRESSION_FILTER("(timestamp_extract(\"__time\",'DAY','UTC') == 2)"), - EXPRESSION_FILTER("(timestamp_extract(\"__time\",'DAY','UTC') == 3)"), - EXPRESSION_FILTER("(timestamp_extract(\"__time\",'DAY','UTC') == 5)") + and( + expressionFilter("(timestamp_extract(\"__time\",'YEAR','UTC') == 2000)"), + or( + expressionFilter("(timestamp_extract(\"__time\",'DAY','UTC') == 2)"), + expressionFilter("(timestamp_extract(\"__time\",'DAY','UTC') == 3)"), + expressionFilter("(timestamp_extract(\"__time\",'DAY','UTC') == 5)") ) ) ) @@ -5502,9 +5515,9 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS()) + .intervals(querySegmentSpec()) .granularity(Granularities.ALL) - .aggregators(AGGS(new CountAggregatorFactory("a0"))) + .aggregators(aggregators(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build() ), @@ -5522,13 +5535,13 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) .setVirtualColumns( - EXPRESSION_VIRTUAL_COLUMN("d0:v", "floor(CAST(\"dim1\", 'DOUBLE'))", ValueType.FLOAT) + expressionVirtualColumn("d0:v", "floor(CAST(\"dim1\", 'DOUBLE'))", ValueType.FLOAT) ) - .setDimensions(DIMS(new DefaultDimensionSpec("d0:v", "d0", ValueType.FLOAT))) - .setAggregatorSpecs(AGGS(new CountAggregatorFactory("a0"))) + .setDimensions(dimensionSpec(new DefaultDimensionSpec("d0:v", "d0", ValueType.FLOAT))) + .setAggregatorSpecs(aggregators(new CountAggregatorFactory("a0"))) .setContext(QUERY_CONTEXT_DEFAULT) .build() ), @@ -5549,17 +5562,17 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) .setVirtualColumns( - EXPRESSION_VIRTUAL_COLUMN( + expressionVirtualColumn( "d0:v", "floor(CAST(\"dim1\", 'DOUBLE'))", ValueType.FLOAT ) ) .setDimensions( - DIMS( + dimensionSpec( new DefaultDimensionSpec( "d0:v", "d0", @@ -5567,7 +5580,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ) ) ) - .setAggregatorSpecs(AGGS(new CountAggregatorFactory("a0"))) + .setAggregatorSpecs(aggregators(new CountAggregatorFactory("a0"))) .setLimitSpec( new DefaultLimitSpec( ImmutableList.of( @@ -5603,23 +5616,23 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) .setVirtualColumns( - EXPRESSION_VIRTUAL_COLUMN( + expressionVirtualColumn( "d0:v", "timestamp_floor(\"__time\",'P1Y',null,'UTC')", ValueType.LONG ) ) .setDimensions( - DIMS( + dimensionSpec( new DefaultDimensionSpec("d0:v", "d0", ValueType.LONG), new DefaultDimensionSpec("dim2", "d1") ) ) .setAggregatorSpecs( - AGGS( + aggregators( new CountAggregatorFactory("a0") ) ) @@ -5650,19 +5663,19 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ), NullHandling.replaceWithDefault() ? ImmutableList.of( - new Object[]{T("2000"), "", 2L}, - new Object[]{T("2000"), "a", 1L}, - new Object[]{T("2001"), "", 1L}, - new Object[]{T("2001"), "a", 1L}, - new Object[]{T("2001"), "abc", 1L} + new Object[]{t("2000"), "", 2L}, + new Object[]{t("2000"), "a", 1L}, + new Object[]{t("2001"), "", 1L}, + new Object[]{t("2001"), "a", 1L}, + new Object[]{t("2001"), "abc", 1L} ) : ImmutableList.of( - new Object[]{T("2000"), null, 1L}, - new Object[]{T("2000"), "", 1L}, - new Object[]{T("2000"), "a", 1L}, - new Object[]{T("2001"), null, 1L}, - new Object[]{T("2001"), "a", 1L}, - new Object[]{T("2001"), "abc", 1L} + new Object[]{t("2000"), null, 1L}, + new Object[]{t("2000"), "", 1L}, + new Object[]{t("2000"), "a", 1L}, + new Object[]{t("2001"), null, 1L}, + new Object[]{t("2001"), "a", 1L}, + new Object[]{t("2001"), "abc", 1L} ) ); }
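// Grouping to a year bucket here is planned as the virtual column
// timestamp_floor("__time",'P1Y',null,'UTC'), which collapses every row to the first
// instant of its UTC year; that is presumably why the expected rows above contain only
// the two buckets t("2000") and t("2001").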
"timestamp_floor(\"__time\",'P1Y',null,'UTC')", ValueType.LONG ) ) .setDimensions( - DIMS( + dimensionSpec( new DefaultDimensionSpec("d0:v", "d0", ValueType.LONG), new DefaultDimensionSpec("dim2", "d1") ) ) .setAggregatorSpecs( - AGGS( + aggregators( new CountAggregatorFactory("a0") ) ) @@ -5650,19 +5663,19 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ), NullHandling.replaceWithDefault() ? ImmutableList.of( - new Object[]{T("2000"), "", 2L}, - new Object[]{T("2000"), "a", 1L}, - new Object[]{T("2001"), "", 1L}, - new Object[]{T("2001"), "a", 1L}, - new Object[]{T("2001"), "abc", 1L} + new Object[]{t("2000"), "", 2L}, + new Object[]{t("2000"), "a", 1L}, + new Object[]{t("2001"), "", 1L}, + new Object[]{t("2001"), "a", 1L}, + new Object[]{t("2001"), "abc", 1L} ) : ImmutableList.of( - new Object[]{T("2000"), null, 1L}, - new Object[]{T("2000"), "", 1L}, - new Object[]{T("2000"), "a", 1L}, - new Object[]{T("2001"), null, 1L}, - new Object[]{T("2001"), "a", 1L}, - new Object[]{T("2001"), "abc", 1L} + new Object[]{t("2000"), null, 1L}, + new Object[]{t("2000"), "", 1L}, + new Object[]{t("2000"), "a", 1L}, + new Object[]{t("2001"), null, 1L}, + new Object[]{t("2001"), "a", 1L}, + new Object[]{t("2001"), "abc", 1L} ) ); } @@ -5675,11 +5688,11 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) - .setVirtualColumns(EXPRESSION_VIRTUAL_COLUMN("d0:v", "strlen(\"dim1\")", ValueType.LONG)) - .setDimensions(DIMS(new DefaultDimensionSpec("d0:v", "d0", ValueType.LONG))) - .setAggregatorSpecs(AGGS(new CountAggregatorFactory("a0"))) + .setVirtualColumns(expression_Virtual_Column("d0:v", "strlen(\"dim1\")", ValueType.LONG)) + .setDimensions(dimensionSpec(new DefaultDimensionSpec("d0:v", "d0", ValueType.LONG))) + .setAggregatorSpecs(aggregators(new CountAggregatorFactory("a0"))) .setContext(QUERY_CONTEXT_DEFAULT) .build() ), @@ -5712,17 +5725,17 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) .setDimFilter( - NOT(SELECTOR( + not(selector( "dim1", "xxx", extractionFn )) ) .setDimensions( - DIMS( + dimensionSpec( new ExtractionDimensionSpec( "dim1", "d0", @@ -5732,7 +5745,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ) ) .setAggregatorSpecs( - AGGS( + aggregators( new CountAggregatorFactory("a0") ) ) @@ -5763,9 +5776,9 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) - .aggregators(AGGS( + .aggregators(aggregators( new CardinalityAggregatorFactory( "a0", null, @@ -5796,15 +5809,15 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.MONTH) - .aggregators(AGGS(new LongSumAggregatorFactory("a0", "cnt"))) + .aggregators(aggregators(new LongSumAggregatorFactory("a0", "cnt"))) 
.context(TIMESERIES_CONTEXT_DEFAULT) .build() ), ImmutableList.of( - new Object[]{3L, T("2000-01-01")}, - new Object[]{3L, T("2001-01-01")} + new Object[]{3L, t("2000-01-01")}, + new Object[]{3L, t("2001-01-01")} ) ); } @@ -5825,15 +5838,15 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Intervals.of("2000-01-01/2001-02-01"))) + .intervals(querySegmentSpec(Intervals.of("2000-01-01/2001-02-01"))) .granularity(Granularities.ALL) - .aggregators(AGGS( + .aggregators(aggregators( new FilteredAggregatorFactory( new LongSumAggregatorFactory("a0", "cnt"), - BOUND( + bound( "__time", - String.valueOf(T("2000-01-01")), - String.valueOf(T("2000-02-01")), + String.valueOf(t("2000-01-01")), + String.valueOf(t("2000-02-01")), false, true, null, @@ -5842,10 +5855,10 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ), new FilteredAggregatorFactory( new LongSumAggregatorFactory("a1", "cnt"), - BOUND( + bound( "__time", - String.valueOf(T("2001-01-01")), - String.valueOf(T("2001-02-01")), + String.valueOf(t("2001-01-01")), + String.valueOf(t("2001-02-01")), false, true, null, @@ -5878,17 +5891,17 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(new PeriodGranularity(Period.months(1), null, DateTimes.inferTzFromString(LOS_ANGELES))) - .aggregators(AGGS(new LongSumAggregatorFactory("a0", "cnt"))) + .aggregators(aggregators(new LongSumAggregatorFactory("a0", "cnt"))) .context(TIMESERIES_CONTEXT_LOS_ANGELES) .build() ), ImmutableList.of( - new Object[]{1L, T("1999-12-01", LOS_ANGELES)}, - new Object[]{2L, T("2000-01-01", LOS_ANGELES)}, - new Object[]{1L, T("2000-12-01", LOS_ANGELES)}, - new Object[]{2L, T("2001-01-01", LOS_ANGELES)} + new Object[]{1L, t("1999-12-01", LOS_ANGELES)}, + new Object[]{2L, t("2000-01-01", LOS_ANGELES)}, + new Object[]{1L, t("2000-12-01", LOS_ANGELES)}, + new Object[]{2L, t("2001-01-01", LOS_ANGELES)} ) ); } @@ -5909,17 +5922,17 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(new PeriodGranularity(Period.months(1), null, DateTimes.inferTzFromString(LOS_ANGELES))) - .aggregators(AGGS(new LongSumAggregatorFactory("a0", "cnt"))) + .aggregators(aggregators(new LongSumAggregatorFactory("a0", "cnt"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build() ), ImmutableList.of( - new Object[]{1L, T("1999-12-01", LOS_ANGELES)}, - new Object[]{2L, T("2000-01-01", LOS_ANGELES)}, - new Object[]{1L, T("2000-12-01", LOS_ANGELES)}, - new Object[]{2L, T("2001-01-01", LOS_ANGELES)} + new Object[]{1L, t("1999-12-01", LOS_ANGELES)}, + new Object[]{2L, t("2000-01-01", LOS_ANGELES)}, + new Object[]{1L, t("2000-12-01", LOS_ANGELES)}, + new Object[]{2L, t("2001-01-01", LOS_ANGELES)} ) ); } @@ -5937,15 +5950,15 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.MONTH) - .aggregators(AGGS(new LongSumAggregatorFactory("a0", "cnt"))) + 
.aggregators(aggregators(new LongSumAggregatorFactory("a0", "cnt"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build() ), ImmutableList.of( - new Object[]{3L, T("2000-01-01")}, - new Object[]{3L, T("2001-01-01")} + new Object[]{3L, t("2000-01-01")}, + new Object[]{3L, t("2001-01-01")} ) ); } @@ -5963,17 +5976,17 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) .setVirtualColumns( - EXPRESSION_VIRTUAL_COLUMN( + expressionVirtualColumn( "d0:v", "timestamp_floor(timestamp_shift(\"__time\",'P1D',-1),'P1M',null,'UTC')", ValueType.LONG ) ) - .setDimensions(DIMS(new DefaultDimensionSpec("d0:v", "d0", ValueType.LONG))) - .setAggregatorSpecs(AGGS(new LongSumAggregatorFactory("a0", "cnt"))) + .setDimensions(dimensionSpec(new DefaultDimensionSpec("d0:v", "d0", ValueType.LONG))) + .setAggregatorSpecs(aggregators(new LongSumAggregatorFactory("a0", "cnt"))) .setLimitSpec( new DefaultLimitSpec( ImmutableList.of( @@ -5990,10 +6003,10 @@ public class CalciteQueryTest extends BaseCalciteQueryTest .build() ), ImmutableList.of( - new Object[]{1L, T("1999-12-01")}, - new Object[]{2L, T("2000-01-01")}, - new Object[]{1L, T("2000-12-01")}, - new Object[]{2L, T("2001-01-01")} + new Object[]{1L, t("1999-12-01")}, + new Object[]{2L, t("2000-01-01")}, + new Object[]{1L, t("2000-12-01")}, + new Object[]{2L, t("2001-01-01")} ) ); } @@ -6011,17 +6024,17 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) .setVirtualColumns( - EXPRESSION_VIRTUAL_COLUMN( + expressionVirtualColumn( "d0:v", "timestamp_floor((\"__time\" + -86400000),'P1M',null,'UTC')", ValueType.LONG ) ) - .setDimensions(DIMS(new DefaultDimensionSpec("d0:v", "d0", ValueType.LONG))) - .setAggregatorSpecs(AGGS(new LongSumAggregatorFactory("a0", "cnt"))) + .setDimensions(dimensionSpec(new DefaultDimensionSpec("d0:v", "d0", ValueType.LONG))) + .setAggregatorSpecs(aggregators(new LongSumAggregatorFactory("a0", "cnt"))) .setLimitSpec( new DefaultLimitSpec( ImmutableList.of( @@ -6038,10 +6051,10 @@ public class CalciteQueryTest extends BaseCalciteQueryTest .build() ), ImmutableList.of( - new Object[]{1L, T("1999-12-01")}, - new Object[]{2L, T("2000-01-01")}, - new Object[]{1L, T("2000-12-01")}, - new Object[]{2L, T("2001-01-01")} + new Object[]{1L, t("1999-12-01")}, + new Object[]{2L, t("2000-01-01")}, + new Object[]{1L, t("2000-12-01")}, + new Object[]{2L, t("2001-01-01")} ) ); } @@ -6059,7 +6072,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity( new PeriodGranularity( Period.months(1), @@ -6067,15 +6080,15 @@ public class CalciteQueryTest extends BaseCalciteQueryTest DateTimeZone.UTC ) ) - .aggregators(AGGS(new LongSumAggregatorFactory("a0", "cnt"))) + .aggregators(aggregators(new LongSumAggregatorFactory("a0", "cnt"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build() ), ImmutableList.of( - new Object[]{1L, T("1999-12-01T01:02:03")}, - new Object[]{2L, T("2000-01-01T01:02:03")}, - new Object[]{1L,
T("2000-12-01T01:02:03")}, - new Object[]{2L, T("2001-01-01T01:02:03")} + new Object[]{1L, t("1999-12-01T01:02:03")}, + new Object[]{2L, t("2000-01-01T01:02:03")}, + new Object[]{1L, t("2000-12-01T01:02:03")}, + new Object[]{2L, t("2001-01-01T01:02:03")} ) ); } @@ -6093,17 +6106,17 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(new PeriodGranularity(Period.months(1), null, DateTimes.inferTzFromString(LOS_ANGELES))) - .aggregators(AGGS(new LongSumAggregatorFactory("a0", "cnt"))) + .aggregators(aggregators(new LongSumAggregatorFactory("a0", "cnt"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build() ), ImmutableList.of( - new Object[]{1L, T("1999-12-01T08")}, - new Object[]{2L, T("2000-01-01T08")}, - new Object[]{1L, T("2000-12-01T08")}, - new Object[]{2L, T("2001-01-01T08")} + new Object[]{1L, t("1999-12-01T08")}, + new Object[]{2L, t("2000-01-01T08")}, + new Object[]{1L, t("2000-12-01T08")}, + new Object[]{2L, t("2001-01-01T08")} ) ); } @@ -6124,17 +6137,17 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(new PeriodGranularity(Period.months(1), null, DateTimes.inferTzFromString(LOS_ANGELES))) - .aggregators(AGGS(new LongSumAggregatorFactory("a0", "cnt"))) + .aggregators(aggregators(new LongSumAggregatorFactory("a0", "cnt"))) .context(TIMESERIES_CONTEXT_LOS_ANGELES) .build() ), ImmutableList.of( - new Object[]{1L, T("1999-12-01", LOS_ANGELES)}, - new Object[]{2L, T("2000-01-01", LOS_ANGELES)}, - new Object[]{1L, T("2000-12-01", LOS_ANGELES)}, - new Object[]{2L, T("2001-01-01", LOS_ANGELES)} + new Object[]{1L, t("1999-12-01", LOS_ANGELES)}, + new Object[]{2L, t("2000-01-01", LOS_ANGELES)}, + new Object[]{1L, t("2000-12-01", LOS_ANGELES)}, + new Object[]{2L, t("2001-01-01", LOS_ANGELES)} ) ); } @@ -6157,37 +6170,37 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Intervals.of("2000/2000-01-02"))) + .intervals(querySegmentSpec(Intervals.of("2000/2000-01-02"))) .granularity(new PeriodGranularity(Period.hours(1), null, DateTimeZone.UTC)) - .aggregators(AGGS(new LongSumAggregatorFactory("a0", "cnt"))) + .aggregators(aggregators(new LongSumAggregatorFactory("a0", "cnt"))) .context(QUERY_CONTEXT_DONT_SKIP_EMPTY_BUCKETS) .build() ), ImmutableList.builder() - .add(new Object[]{1L, T("2000-01-01")}) - .add(new Object[]{defaultVal, T("2000-01-01T01")}) - .add(new Object[]{defaultVal, T("2000-01-01T02")}) - .add(new Object[]{defaultVal, T("2000-01-01T03")}) - .add(new Object[]{defaultVal, T("2000-01-01T04")}) - .add(new Object[]{defaultVal, T("2000-01-01T05")}) - .add(new Object[]{defaultVal, T("2000-01-01T06")}) - .add(new Object[]{defaultVal, T("2000-01-01T07")}) - .add(new Object[]{defaultVal, T("2000-01-01T08")}) - .add(new Object[]{defaultVal, T("2000-01-01T09")}) - .add(new Object[]{defaultVal, T("2000-01-01T10")}) - .add(new Object[]{defaultVal, T("2000-01-01T11")}) - .add(new Object[]{defaultVal, T("2000-01-01T12")}) - .add(new Object[]{defaultVal, T("2000-01-01T13")}) - .add(new Object[]{defaultVal, T("2000-01-01T14")}) - .add(new Object[]{defaultVal, 
T("2000-01-01T15")}) - .add(new Object[]{defaultVal, T("2000-01-01T16")}) - .add(new Object[]{defaultVal, T("2000-01-01T17")}) - .add(new Object[]{defaultVal, T("2000-01-01T18")}) - .add(new Object[]{defaultVal, T("2000-01-01T19")}) - .add(new Object[]{defaultVal, T("2000-01-01T20")}) - .add(new Object[]{defaultVal, T("2000-01-01T21")}) - .add(new Object[]{defaultVal, T("2000-01-01T22")}) - .add(new Object[]{defaultVal, T("2000-01-01T23")}) + .add(new Object[]{1L, t("2000-01-01")}) + .add(new Object[]{defaultVal, t("2000-01-01T01")}) + .add(new Object[]{defaultVal, t("2000-01-01T02")}) + .add(new Object[]{defaultVal, t("2000-01-01T03")}) + .add(new Object[]{defaultVal, t("2000-01-01T04")}) + .add(new Object[]{defaultVal, t("2000-01-01T05")}) + .add(new Object[]{defaultVal, t("2000-01-01T06")}) + .add(new Object[]{defaultVal, t("2000-01-01T07")}) + .add(new Object[]{defaultVal, t("2000-01-01T08")}) + .add(new Object[]{defaultVal, t("2000-01-01T09")}) + .add(new Object[]{defaultVal, t("2000-01-01T10")}) + .add(new Object[]{defaultVal, t("2000-01-01T11")}) + .add(new Object[]{defaultVal, t("2000-01-01T12")}) + .add(new Object[]{defaultVal, t("2000-01-01T13")}) + .add(new Object[]{defaultVal, t("2000-01-01T14")}) + .add(new Object[]{defaultVal, t("2000-01-01T15")}) + .add(new Object[]{defaultVal, t("2000-01-01T16")}) + .add(new Object[]{defaultVal, t("2000-01-01T17")}) + .add(new Object[]{defaultVal, t("2000-01-01T18")}) + .add(new Object[]{defaultVal, t("2000-01-01T19")}) + .add(new Object[]{defaultVal, t("2000-01-01T20")}) + .add(new Object[]{defaultVal, t("2000-01-01T21")}) + .add(new Object[]{defaultVal, t("2000-01-01T22")}) + .add(new Object[]{defaultVal, t("2000-01-01T23")}) .build() ); } @@ -6205,19 +6218,19 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(new PeriodGranularity(Period.days(1), null, DateTimeZone.UTC)) - .aggregators(AGGS(new LongSumAggregatorFactory("a0", "cnt"))) + .aggregators(aggregators(new LongSumAggregatorFactory("a0", "cnt"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build() ), ImmutableList.of( - new Object[]{1L, D("2000-01-01")}, - new Object[]{1L, D("2000-01-02")}, - new Object[]{1L, D("2000-01-03")}, - new Object[]{1L, D("2001-01-01")}, - new Object[]{1L, D("2001-01-02")}, - new Object[]{1L, D("2001-01-03")} + new Object[]{1L, d("2000-01-01")}, + new Object[]{1L, d("2000-01-02")}, + new Object[]{1L, d("2000-01-03")}, + new Object[]{1L, d("2001-01-01")}, + new Object[]{1L, d("2001-01-02")}, + new Object[]{1L, d("2001-01-03")} ) ); } @@ -6227,7 +6240,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest { testQuery( "SELECT SUM(cnt), dt FROM (\n" - + " SELECT CAST(FLOOR(__time TO QUARTER) AS DATE) AS dt,\n" + + " SELECT CASt(FLOOR(__time TO QUARTER) AS DATE) AS dt,\n" + " cnt FROM druid.foo\n" + ") AS x\n" + "GROUP BY dt\n" @@ -6235,15 +6248,15 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(new PeriodGranularity(Period.months(3), null, DateTimeZone.UTC)) - .aggregators(AGGS(new LongSumAggregatorFactory("a0", "cnt"))) + .aggregators(aggregators(new LongSumAggregatorFactory("a0", "cnt"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build() ), 
ImmutableList.of( - new Object[]{3L, D("2000-01-01")}, - new Object[]{3L, D("2001-01-01")} + new Object[]{3L, d("2000-01-01")}, + new Object[]{3L, d("2001-01-01")} ) ); } @@ -6261,16 +6274,16 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.MONTH) - .aggregators(AGGS(new LongSumAggregatorFactory("a0", "cnt"))) + .aggregators(aggregators(new LongSumAggregatorFactory("a0", "cnt"))) .descending(true) .context(TIMESERIES_CONTEXT_DEFAULT) .build() ), ImmutableList.of( - new Object[]{T("2001-01-01"), 3L}, - new Object[]{T("2000-01-01"), 3L} + new Object[]{t("2001-01-01"), 3L}, + new Object[]{t("2000-01-01"), 3L} ) ); } @@ -6288,17 +6301,17 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) .setVirtualColumns( - EXPRESSION_VIRTUAL_COLUMN( + expressionVirtualColumn( "d0:v", "timestamp_extract(\"__time\",'YEAR','UTC')", ValueType.LONG ) ) - .setDimensions(DIMS(new DefaultDimensionSpec("d0:v", "d0", ValueType.LONG))) - .setAggregatorSpecs(AGGS(new LongSumAggregatorFactory("a0", "cnt"))) + .setDimensions(dimensionSpec(new DefaultDimensionSpec("d0:v", "d0", ValueType.LONG))) + .setAggregatorSpecs(aggregators(new LongSumAggregatorFactory("a0", "cnt"))) .setLimitSpec( new DefaultLimitSpec( ImmutableList.of( @@ -6334,17 +6347,17 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) .setVirtualColumns( - EXPRESSION_VIRTUAL_COLUMN( + expressionVirtualColumn( "d0:v", "timestamp_format(\"__time\",'yyyy MM','UTC')", ValueType.STRING ) ) - .setDimensions(DIMS(new DefaultDimensionSpec("d0:v", "d0", ValueType.STRING))) - .setAggregatorSpecs(AGGS(new LongSumAggregatorFactory("a0", "cnt"))) + .setDimensions(dimensionSpec(new DefaultDimensionSpec("d0:v", "d0", ValueType.STRING))) + .setAggregatorSpecs(aggregators(new LongSumAggregatorFactory("a0", "cnt"))) .setLimitSpec( new DefaultLimitSpec( ImmutableList.of( @@ -6378,17 +6391,17 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) .setVirtualColumns( - EXPRESSION_VIRTUAL_COLUMN( + expressionVirtualColumn( "d0:v", "timestamp_extract(timestamp_floor(\"__time\",'P1Y',null,'UTC'),'YEAR','UTC')", ValueType.LONG ) ) - .setDimensions(DIMS(new DefaultDimensionSpec("d0:v", "d0", ValueType.LONG))) - .setAggregatorSpecs(AGGS(new LongSumAggregatorFactory("a0", "cnt"))) + .setDimensions(dimensionSpec(new DefaultDimensionSpec("d0:v", "d0", ValueType.LONG))) + .setAggregatorSpecs(aggregators(new LongSumAggregatorFactory("a0", "cnt"))) .setContext(QUERY_CONTEXT_DEFAULT) .build() ), @@ -6413,17 +6426,17 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) +
.setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) .setVirtualColumns( - EXPRESSION_VIRTUAL_COLUMN( + expressionVirtualColumn( "d0:v", "timestamp_extract(timestamp_floor(\"__time\",'P1Y',null,'America/Los_Angeles'),'YEAR','America/Los_Angeles')", ValueType.LONG ) ) - .setDimensions(DIMS(new DefaultDimensionSpec("d0:v", "d0", ValueType.LONG))) - .setAggregatorSpecs(AGGS(new LongSumAggregatorFactory("a0", "cnt"))) + .setDimensions(dimensionSpec(new DefaultDimensionSpec("d0:v", "d0", ValueType.LONG))) + .setAggregatorSpecs(aggregators(new LongSumAggregatorFactory("a0", "cnt"))) .setContext(QUERY_CONTEXT_LOS_ANGELES) .build() ), @@ -6452,15 +6465,15 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.MONTH) - .aggregators(AGGS(new LongSumAggregatorFactory("a0", "cnt"))) + .aggregators(aggregators(new LongSumAggregatorFactory("a0", "cnt"))) .limit(1) .context(TIMESERIES_CONTEXT_DEFAULT) .build() ), ImmutableList.of( - new Object[]{T("2000-01-01"), 3L} + new Object[]{t("2000-01-01"), 3L} ) ); } @@ -6479,15 +6492,15 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.MONTH) - .aggregators(AGGS(new LongSumAggregatorFactory("a0", "cnt"))) + .aggregators(aggregators(new LongSumAggregatorFactory("a0", "cnt"))) .limit(1) .context(TIMESERIES_CONTEXT_DEFAULT) .build() ), ImmutableList.of( - new Object[]{T("2000-01-01"), 3L} + new Object[]{t("2000-01-01"), 3L} ) ); } @@ -6507,15 +6520,15 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.MONTH) - .aggregators(AGGS(new LongSumAggregatorFactory("a0", "cnt"))) + .aggregators(aggregators(new LongSumAggregatorFactory("a0", "cnt"))) .limit(1) .context(TIMESERIES_CONTEXT_DEFAULT) .build() ), ImmutableList.of( - new Object[]{T("2000-01-01"), 3L} + new Object[]{t("2000-01-01"), 3L} ) ); } @@ -6531,22 +6544,22 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) .setVirtualColumns( - EXPRESSION_VIRTUAL_COLUMN( + expressionVirtualColumn( "d1:v", "timestamp_floor(\"__time\",'P1M',null,'UTC')", ValueType.LONG ) ) .setDimensions( - DIMS( + dimensionSpec( new DefaultDimensionSpec("dim2", "d0"), new DefaultDimensionSpec("d1:v", "d1", ValueType.LONG) ) ) - .setAggregatorSpecs(AGGS(new LongSumAggregatorFactory("a0", "cnt"))) + .setAggregatorSpecs(aggregators(new LongSumAggregatorFactory("a0", "cnt"))) .setLimitSpec( new DefaultLimitSpec( ImmutableList.of( @@ -6565,19 +6578,19 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ), NullHandling.replaceWithDefault() ?
ImmutableList.of( - new Object[]{"", T("2000-01-01"), 2L}, - new Object[]{"", T("2001-01-01"), 1L}, - new Object[]{"a", T("2000-01-01"), 1L}, - new Object[]{"a", T("2001-01-01"), 1L}, - new Object[]{"abc", T("2001-01-01"), 1L} + new Object[]{"", t("2000-01-01"), 2L}, + new Object[]{"", t("2001-01-01"), 1L}, + new Object[]{"a", t("2000-01-01"), 1L}, + new Object[]{"a", t("2001-01-01"), 1L}, + new Object[]{"abc", t("2001-01-01"), 1L} ) : ImmutableList.of( - new Object[]{null, T("2000-01-01"), 1L}, - new Object[]{null, T("2001-01-01"), 1L}, - new Object[]{"", T("2000-01-01"), 1L}, - new Object[]{"a", T("2000-01-01"), 1L}, - new Object[]{"a", T("2001-01-01"), 1L}, - new Object[]{"abc", T("2001-01-01"), 1L} + new Object[]{null, t("2000-01-01"), 1L}, + new Object[]{null, t("2001-01-01"), 1L}, + new Object[]{"", t("2000-01-01"), 1L}, + new Object[]{"a", t("2000-01-01"), 1L}, + new Object[]{"a", t("2001-01-01"), 1L}, + new Object[]{"abc", t("2001-01-01"), 1L} ) ); } @@ -6595,29 +6608,29 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) - .setDimFilter(NOT(SELECTOR("dim1", "", null))) - .setDimensions(DIMS(new DefaultDimensionSpec("dim1", "d0"))) + .setDimFilter(not(selector("dim1", "", null))) + .setDimensions(dimensionSpec(new DefaultDimensionSpec("dim1", "d0"))) .setContext(QUERY_CONTEXT_DEFAULT) .build(), GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) .setDimFilter( - AND( - NOT(SELECTOR("dim1", "xxx", null)), - IN("dim2", ImmutableList.of("1", "10.1", "2", "abc", "def"), null) + and( + not(selector("dim1", "xxx", null)), + in("dim2", ImmutableList.of("1", "10.1", "2", "abc", "def"), null) ) ) .setDimensions( - DIMS( + dimensionSpec( new DefaultDimensionSpec("dim1", "d0"), new DefaultDimensionSpec("dim2", "d1") ) ) - .setAggregatorSpecs(AGGS(new CountAggregatorFactory("a0"))) + .setAggregatorSpecs(aggregators(new CountAggregatorFactory("a0"))) .setLimitSpec( new DefaultLimitSpec( ImmutableList.of(new OrderByColumnSpec("d1", OrderByColumnSpec.Direction.ASCENDING)), @@ -6693,25 +6706,25 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) - .setDimFilter(SELECTOR("dim2", "abc", null)) - .setDimensions(DIMS( + .setDimFilter(selector("dim2", "abc", null)) + .setDimensions(dimensionSpec( new DefaultDimensionSpec("dim1", "d0"), new DefaultDimensionSpec("dim2", "d1") )) - .setAggregatorSpecs(AGGS(new CountAggregatorFactory("a0"))) - .setHavingSpec(HAVING(NUMERIC_SELECTOR("a0", "1", null))) + .setAggregatorSpecs(aggregators(new CountAggregatorFactory("a0"))) + .setHavingSpec(having(numeric_Selector("a0", "1", null))) .setContext(QUERY_CONTEXT_DEFAULT) .build(), newScanQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) - .filters(OR( - SELECTOR("dim1", "def", null), - AND( - SELECTOR("dim1", "def", null), - SELECTOR("dim2", "abc", null) + .intervals(querySegmentSpec(Filtration.eternity())) + .filters(or( + selector("dim1", "def", null), + and( + selector("dim1", "def", 
null), + selector("dim2", "abc", null) ) )) .columns("__time", "cnt", "dim1", "dim2") @@ -6720,7 +6733,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest .build() ), ImmutableList.of( - new Object[]{T("2001-01-02"), 1L, "def", "abc"} + new Object[]{t("2001-01-02"), 1L, "def", "abc"} ) ); } @@ -6742,9 +6755,9 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) - .setDimensions(DIMS(new DefaultDimensionSpec("dim2", "d0"))) + .setDimensions(dimensionSpec(new DefaultDimensionSpec("dim2", "d0"))) .setLimitSpec( new DefaultLimitSpec( ImmutableList.of( @@ -6761,8 +6774,8 @@ public class CalciteQueryTest extends BaseCalciteQueryTest .build(), newScanQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) - .filters(IN("dim2", ImmutableList.of("", "a", "abc"), null)) + .intervals(querySegmentSpec(Filtration.eternity())) + .filters(in("dim2", ImmutableList.of("", "a", "abc"), null)) .columns("dim1", "dim2") .context(QUERY_CONTEXT_DEFAULT) .build() @@ -6798,22 +6811,22 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) - .setDimensions(DIMS(new DefaultDimensionSpec("dim2", "d0"))) - .setDimFilter(SELECTOR("dim1", "def", null)) + .setDimensions(dimensionSpec(new DefaultDimensionSpec("dim2", "d0"))) + .setDimFilter(selector("dim1", "def", null)) .setContext(QUERY_CONTEXT_DEFAULT) .build(), newScanQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .virtualColumns( - EXPRESSION_VIRTUAL_COLUMN("v0", "timestamp_extract(\"__time\",'MONTH','UTC')", ValueType.LONG) + expression_Virtual_Column("v0", "timestamp_extract(\"__time\",'MONTH','UTC')", ValueType.LONG) ) .filters( - AND( - NOT(SELECTOR("dim1", "", null)), - SELECTOR("dim2", "abc", null) + and( + not(selector("dim1", "", null)), + selector("dim2", "abc", null) ) ) .columns("dim1", "v0") @@ -6842,29 +6855,29 @@ public class CalciteQueryTest extends BaseCalciteQueryTest GroupByQuery .builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) - .setDimensions(DIMS(new DefaultDimensionSpec("dim2", "d0"))) - .setDimFilter(SELECTOR("dim1", "def", null)) + .setDimensions(dimensionSpec(new DefaultDimensionSpec("dim2", "d0"))) + .setDimFilter(selector("dim1", "def", null)) .setContext(QUERY_CONTEXT_DEFAULT) .build(), GroupByQuery .builder() .setDataSource(CalciteTests.DATASOURCE1) .setVirtualColumns( - EXPRESSION_VIRTUAL_COLUMN("d0:v", "timestamp_extract(\"__time\",'MONTH','UTC')", ValueType.LONG) + expression_Virtual_Column("d0:v", "timestamp_extract(\"__time\",'MONTH','UTC')", ValueType.LONG) ) .setDimFilter( - AND( - NOT(SELECTOR("dim1", "", null)), - SELECTOR("dim2", "abc", null) + and( + not(selector("dim1", "", null)), + selector("dim2", "abc", null) ) ) - .setDimensions(DIMS(new DefaultDimensionSpec("d0:v", "d0", ValueType.LONG))) - .setInterval(QSS(Filtration.eternity())) + .setDimensions(dimensionSpec(new DefaultDimensionSpec("d0:v", "d0", 
ValueType.LONG))) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) .setAggregatorSpecs( - AGGS( + aggregators( new CardinalityAggregatorFactory( "a0", null, @@ -6907,25 +6920,25 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) - .setDimFilter(NOT(SELECTOR("dim1", "", null))) + .setDimFilter(not(selector("dim1", "", null))) .setDimensions( - DIMS(new ExtractionDimensionSpec("dim1", "d0", new SubstringDimExtractionFn(0, 1))) + dimensionSpec(new ExtractionDimensionSpec("dim1", "d0", new SubstringDimExtractionFn(0, 1))) ) .setContext(QUERY_CONTEXT_DEFAULT) .build(), GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) - .setDimFilter(IN( + .setDimFilter(in( "dim2", ImmutableList.of("1", "2", "a", "d"), new SubstringDimExtractionFn(0, 1) )) - .setDimensions(DIMS(new DefaultDimensionSpec("dim2", "d0"))) - .setAggregatorSpecs(AGGS(new CountAggregatorFactory("a0"))) + .setDimensions(dimensionSpec(new DefaultDimensionSpec("dim2", "d0"))) + .setAggregatorSpecs(aggregators(new CountAggregatorFactory("a0"))) .setContext(QUERY_CONTEXT_DEFAULT) .build() ), @@ -6952,17 +6965,17 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE2) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) - .setDimFilter(OR( + .setDimFilter(or( new LikeDimFilter("dim1", "דר%", null, null), new SelectorDimFilter("dim1", "друид", null) )) - .setDimensions(DIMS( + .setDimensions(dimensionSpec( new DefaultDimensionSpec("dim1", "d0"), new DefaultDimensionSpec("dim2", "d1") )) - .setAggregatorSpecs(AGGS(new CountAggregatorFactory("a0"))) + .setAggregatorSpecs(aggregators(new CountAggregatorFactory("a0"))) .setContext(QUERY_CONTEXT_DEFAULT) .build() ), @@ -6981,15 +6994,15 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) .setDimensions( - DIMS( + dimensionSpec( new DefaultDimensionSpec("dim1", "d0"), new DefaultDimensionSpec("dim2", "d1") ) ) - .setAggregatorSpecs(AGGS(new CountAggregatorFactory("a0"))) + .setAggregatorSpecs(aggregators(new CountAggregatorFactory("a0"))) .setLimitSpec( new DefaultLimitSpec( Collections.singletonList( @@ -7020,18 +7033,18 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) .setDimensions( - DIMS( + dimensionSpec( new DefaultDimensionSpec("dim1", "d0"), new DefaultDimensionSpec("dim2", "d1") ) ) .setAggregatorSpecs( - AGGS(new CountAggregatorFactory("a0"), new DoubleSumAggregatorFactory("a1", "m2")) + aggregators(new CountAggregatorFactory("a0"), new DoubleSumAggregatorFactory("a1", "m2")) ) - .setPostAggregatorSpecs(Collections.singletonList(EXPRESSION_POST_AGG( + 
.setPostAggregatorSpecs(Collections.singletonList(expressionPostAgg( "s0", "(\"a1\" / \"a0\")" ))) @@ -7065,14 +7078,14 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) .setDimensions( - DIMS( + dimensionSpec( new DefaultDimensionSpec("dim1", "d0") ) ) - .setAggregatorSpecs(AGGS(new CountAggregatorFactory("a0"))) + .setAggregatorSpecs(aggregators(new CountAggregatorFactory("a0"))) .setLimitSpec( new DefaultLimitSpec( Collections.singletonList( @@ -7122,9 +7135,9 @@ public class CalciteQueryTest extends BaseCalciteQueryTest .setDataSource( GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) - .setDimensions(DIMS( + .setDimensions(dimensionSpec( new DefaultDimensionSpec("__time", "d0", ValueType.LONG), new DefaultDimensionSpec("m2", "d1", ValueType.DOUBLE), new DefaultDimensionSpec("dim1", "d2") @@ -7132,13 +7145,13 @@ public class CalciteQueryTest extends BaseCalciteQueryTest .setContext(QUERY_CONTEXT_DEFAULT) .build() ) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) - .setDimensions(DIMS( + .setDimensions(dimensionSpec( new DefaultDimensionSpec("d0", "_d0", ValueType.LONG), new DefaultDimensionSpec("d2", "_d1", ValueType.STRING) )) - .setAggregatorSpecs(AGGS( + .setAggregatorSpecs(aggregators( new CountAggregatorFactory("a0") )) .setLimitSpec( @@ -7180,17 +7193,17 @@ public class CalciteQueryTest extends BaseCalciteQueryTest Collections.singletonList( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) - .filters(SELECTOR("dim2", "a", null)) + .intervals(querySegmentSpec(Filtration.eternity())) + .filters(selector("dim2", "a", null)) .granularity(Granularities.YEAR) .aggregators( - AGGS( + aggregators( new DoubleSumAggregatorFactory("a0", "m1"), new DoubleSumAggregatorFactory("a1", "m2") ) ) .postAggregators( - EXPRESSION_POST_AGG("p0", "(\"a0\" + \"a1\")") + expressionPostAgg("p0", "(\"a0\" + \"a1\")") ) .descending(true) .context(TIMESERIES_CONTEXT_DEFAULT) @@ -7220,11 +7233,11 @@ public class CalciteQueryTest extends BaseCalciteQueryTest Collections.singletonList( new TopNQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .granularity(Granularities.ALL) .dimension(new DefaultDimensionSpec("m1", "d0", ValueType.FLOAT)) .filters("dim2", "a") - .aggregators(AGGS( + .aggregators(aggregators( new DoubleSumAggregatorFactory("a0:sum", "m2"), new CountAggregatorFactory("a0:count"), new DoubleSumAggregatorFactory("a1", "m1"), @@ -7240,7 +7253,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest new FieldAccessPostAggregator(null, "a0:count") ) ), - EXPRESSION_POST_AGG("p0", "(\"a1\" + \"a2\")") + expressionPostAgg("p0", "(\"a1\" + \"a2\")") ) ) .metric(new DimensionTopNMetricSpec(null, StringComparators.NUMERIC)) @@ -7263,8 +7276,8 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( newScanQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) - .virtualColumns(EXPRESSION_VIRTUAL_COLUMN( +
.intervals(querySegmentSpec(Filtration.eternity())) + .virtualColumns(expressionVirtualColumn( "v0", "concat(\"dim1\",'-',\"dim1\",'_',\"dim1\")", ValueType.STRING @@ -7289,8 +7302,8 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( newScanQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) - .virtualColumns(EXPRESSION_VIRTUAL_COLUMN( + .intervals(querySegmentSpec(Filtration.eternity())) + .virtualColumns(expressionVirtualColumn( "v0", "concat(\"dim1\",concat(\"dim2\",'x'),\"m2\",9999,\"dim1\")", ValueType.STRING @@ -7319,8 +7332,8 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( newScanQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) - .virtualColumns(EXPRESSION_VIRTUAL_COLUMN("v0", "concat(\"dim1\",\"dim1\")", ValueType.STRING)) + .intervals(querySegmentSpec(Filtration.eternity())) + .virtualColumns(expressionVirtualColumn("v0", "concat(\"dim1\",\"dim1\")", ValueType.STRING)) .columns("v0") .resultFormat(ScanQuery.ResultFormat.RESULT_FORMAT_COMPACTED_LIST) .context(QUERY_CONTEXT_DEFAULT) @@ -7341,8 +7354,8 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( newScanQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Filtration.eternity())) - .virtualColumns(EXPRESSION_VIRTUAL_COLUMN( + .intervals(querySegmentSpec(Filtration.eternity())) + .virtualColumns(expressionVirtualColumn( "v0", "concat(\"dim1\",CAST(\"m2\", 'STRING'))", ValueType.STRING @@ -7380,15 +7393,15 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Intervals.of("2000-01-01/2002-01-01"))) + .intervals(querySegmentSpec(Intervals.of("2000-01-01/2002-01-01"))) .granularity(Granularities.MONTH) - .aggregators(AGGS(new LongSumAggregatorFactory("a0", "cnt"))) + .aggregators(aggregators(new LongSumAggregatorFactory("a0", "cnt"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build() ), ImmutableList.of( - new Object[]{3L, T("2000-01-01")}, - new Object[]{3L, T("2001-01-01")} + new Object[]{3L, t("2000-01-01")}, + new Object[]{3L, t("2001-01-01")} ) ); @@ -7406,20 +7419,20 @@ public class CalciteQueryTest extends BaseCalciteQueryTest new QueryDataSource( GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Intervals.utc( + .setInterval(querySegmentSpec(Intervals.utc( DateTimes.of("2000-01-01").getMillis(), JodaUtils.MAX_INSTANT ))) .setGranularity(Granularities.ALL) - .setDimensions(DIMS(new DefaultDimensionSpec("dim2", "d0"))) - .setAggregatorSpecs(AGGS(new LongSumAggregatorFactory("a0", "cnt"))) + .setDimensions(dimensionSpec(new DefaultDimensionSpec("dim2", "d0"))) + .setAggregatorSpecs(aggregators(new LongSumAggregatorFactory("a0", "cnt"))) + .setContext(QUERY_CONTEXT_DEFAULT) .build() ) ) - .setInterval(QSS(Filtration.eternity())) + .setInterval(querySegmentSpec(Filtration.eternity())) .setGranularity(Granularities.ALL) - .setAggregatorSpecs(AGGS( + .setAggregatorSpecs(aggregators( new LongSumAggregatorFactory("_a0", "a0"), new CountAggregatorFactory("_a1") )) @@ -7447,10 +7460,13 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of( GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) - .setInterval(QSS(Intervals.utc(DateTimes.of("2000-01-01").getMillis(), JodaUtils.MAX_INSTANT))) + .setInterval(querySegmentSpec(Intervals.utc( +
DateTimes.of("2000-01-01").getMillis(), + JodaUtils.MAX_INSTANT + ))) .setGranularity(Granularities.ALL) - .setDimFilter(NOT(SELECTOR("dim1", "", null))) - .setDimensions(DIMS(new ExtractionDimensionSpec( + .setDimFilter(not(selector("dim1", "", null))) + .setDimensions(dimensionSpec(new ExtractionDimensionSpec( "dim1", "d0", new SubstringDimExtractionFn(0, 1) @@ -7459,14 +7475,17 @@ .build(), Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(Intervals.utc(DateTimes.of("2000-01-01").getMillis(), JodaUtils.MAX_INSTANT))) + .intervals(querySegmentSpec(Intervals.utc( + DateTimes.of("2000-01-01").getMillis(), + JodaUtils.MAX_INSTANT + ))) .granularity(Granularities.ALL) - .filters(IN( + .filters(in( "dim2", ImmutableList.of("1", "2", "a", "d"), new SubstringDimExtractionFn(0, 1) )) - .aggregators(AGGS(new CountAggregatorFactory("a0"))) + .aggregators(aggregators(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build() ), @@ -7541,17 +7560,17 @@ ImmutableList.of( newScanQueryBuilder() .dataSource(CalciteTests.DATASOURCE3) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .columns("dim1") - .filters(SELECTOR("f1", "0.1", null)) + .filters(selector("f1", "0.1", null)) .resultFormat(ScanQuery.ResultFormat.RESULT_FORMAT_COMPACTED_LIST) .limit(1) .context(QUERY_CONTEXT_DEFAULT) .build() ), ImmutableList.of( new Object[]{"10.1"} ) ); } @@ -7563,17 +7587,17 @@ ImmutableList.of( newScanQueryBuilder() .dataSource(CalciteTests.DATASOURCE3) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .columns("dim1") - .filters(SELECTOR("d1", "1.7", null)) + .filters(selector("d1", "1.7", null)) .resultFormat(ScanQuery.ResultFormat.RESULT_FORMAT_COMPACTED_LIST) .limit(1) .context(QUERY_CONTEXT_DEFAULT) .build() ), ImmutableList.of( new Object[]{"10.1"} ) ); } @@ -7585,9 +7614,9 @@ ImmutableList.of( newScanQueryBuilder() .dataSource(CalciteTests.DATASOURCE3) - .intervals(QSS(Filtration.eternity())) + .intervals(querySegmentSpec(Filtration.eternity())) .columns("dim1") - .filters(SELECTOR("l1", "7", null)) + .filters(selector("l1", "7", null)) .resultFormat(ScanQuery.ResultFormat.RESULT_FORMAT_COMPACTED_LIST) .limit(1) .context(QUERY_CONTEXT_DEFAULT) diff --git a/web-console/.gitignore b/web-console/.gitignore index 540af4c3257..1e0bebd4e2e 100644 --- a/web-console/.gitignore +++ b/web-console/.gitignore @@ -2,6 +2,7 @@ node/ node_modules/ resources/ public/ +assets/ lib/*.css coordinator-console/ diff --git a/web-console/package-lock.json b/web-console/package-lock.json index
ad3f8f7b408..d0abc172f94 100644 --- a/web-console/package-lock.json +++ b/web-console/package-lock.json @@ -14,22 +14,22 @@ } }, "@babel/core": { - "version": "7.2.2", - "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.2.2.tgz", - "integrity": "sha512-59vB0RWt09cAct5EIe58+NzGP4TFSD3Bz//2/ELy3ZeTeKF6VTD1AXlH8BGGbCX0PuobZBsIzO7IAI9PH67eKw==", + "version": "7.3.4", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.3.4.tgz", + "integrity": "sha512-jRsuseXBo9pN197KnDwhhaaBzyZr2oIcLHHTt2oDdQrej5Qp57dCCJafWx5ivU8/alEYDpssYqv1MUqcxwQlrA==", "dev": true, "requires": { "@babel/code-frame": "^7.0.0", - "@babel/generator": "^7.2.2", + "@babel/generator": "^7.3.4", "@babel/helpers": "^7.2.0", - "@babel/parser": "^7.2.2", + "@babel/parser": "^7.3.4", "@babel/template": "^7.2.2", - "@babel/traverse": "^7.2.2", - "@babel/types": "^7.2.2", + "@babel/traverse": "^7.3.4", + "@babel/types": "^7.3.4", "convert-source-map": "^1.1.0", "debug": "^4.1.0", "json5": "^2.1.0", - "lodash": "^4.17.10", + "lodash": "^4.17.11", "resolve": "^1.3.2", "semver": "^5.4.1", "source-map": "^0.5.0" @@ -53,12 +53,6 @@ "minimist": "^1.2.0" } }, - "ms": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.1.tgz", - "integrity": "sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg==", - "dev": true - }, "source-map": { "version": "0.5.7", "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", @@ -68,14 +62,14 @@ } }, "@babel/generator": { - "version": "7.3.0", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.3.0.tgz", - "integrity": "sha512-dZTwMvTgWfhmibq4V9X+LMf6Bgl7zAodRn9PvcPdhlzFMbvUutx74dbEv7Atz3ToeEpevYEJtAwfxq/bDCzHWg==", + "version": "7.3.4", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.3.4.tgz", + "integrity": "sha512-8EXhHRFqlVVWXPezBW5keTiQi/rJMQTg/Y9uVCEZ0CAF3PKtCCaVRnp64Ii1ujhkoDhhF1fVsImoN4yJ2uz4Wg==", "dev": true, "requires": { - "@babel/types": "^7.3.0", + "@babel/types": "^7.3.4", "jsesc": "^2.5.1", - "lodash": "^4.17.10", + "lodash": "^4.17.11", "source-map": "^0.5.0", "trim-right": "^1.0.1" }, @@ -152,9 +146,9 @@ } }, "@babel/parser": { - "version": "7.3.1", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.3.1.tgz", - "integrity": "sha512-ATz6yX/L8LEnC3dtLQnIx4ydcPxhLcoy9Vl6re00zb2w5lG6itY6Vhnr1KFRPq/FHNsgl/gh2mjNN20f9iJTTA==", + "version": "7.3.4", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.3.4.tgz", + "integrity": "sha512-tXZCqWtlOOP4wgCp6RjRvLmfuhnqTLy9VHwRochJBCP2nDm27JnnuFEnXFASVyQNHk36jD1tAammsCEEqgscIQ==", "dev": true }, "@babel/plugin-syntax-object-rest-spread": { @@ -167,9 +161,9 @@ } }, "@babel/runtime": { - "version": "7.3.1", - "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.3.1.tgz", - "integrity": "sha512-7jGW8ppV0ant637pIqAcFfQDDH1orEPGJb8aXfUozuCU3QqX7rX4DA8iwrbPrR1hcH0FTTHz47yQnk+bl5xHQA==", + "version": "7.3.4", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.3.4.tgz", + "integrity": "sha512-IvfvnMdSaLBateu0jfsYIpZTxAc2cKEXEMiezGGN75QcBcecDUKd3PgLAncT0oOgxKy8dd8hrJKj9MfzgfZd6g==", "requires": { "regenerator-runtime": "^0.12.0" } @@ -186,20 +180,20 @@ } }, "@babel/traverse": { - "version": "7.2.3", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.2.3.tgz", - "integrity": "sha512-Z31oUD/fJvEWVR0lNZtfgvVt512ForCTNKYcJBGbPb1QZfve4WGH8Wsy7+Mev33/45fhP/hwQtvgusNdcCMgSw==", + "version": "7.3.4", + "resolved": 
"https://registry.npmjs.org/@babel/traverse/-/traverse-7.3.4.tgz", + "integrity": "sha512-TvTHKp6471OYEcE/91uWmhR6PrrYywQntCHSaZ8CM8Vmp+pjAusal4nGB2WCCQd0rvI7nOMKn9GnbcvTUz3/ZQ==", "dev": true, "requires": { "@babel/code-frame": "^7.0.0", - "@babel/generator": "^7.2.2", + "@babel/generator": "^7.3.4", "@babel/helper-function-name": "^7.1.0", "@babel/helper-split-export-declaration": "^7.0.0", - "@babel/parser": "^7.2.3", - "@babel/types": "^7.2.2", + "@babel/parser": "^7.3.4", + "@babel/types": "^7.3.4", "debug": "^4.1.0", "globals": "^11.1.0", - "lodash": "^4.17.10" + "lodash": "^4.17.11" }, "dependencies": { "debug": { @@ -210,50 +204,30 @@ "requires": { "ms": "^2.1.1" } - }, - "ms": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.1.tgz", - "integrity": "sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg==", - "dev": true } } }, "@babel/types": { - "version": "7.3.0", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.3.0.tgz", - "integrity": "sha512-QkFPw68QqWU1/RVPyBe8SO7lXbPfjtqAxRYQKpFpaB8yMq7X2qAqfwK5LKoQufEkSmO5NQ70O6Kc3Afk03RwXw==", + "version": "7.3.4", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.3.4.tgz", + "integrity": "sha512-WEkp8MsLftM7O/ty580wAmZzN1nDmCACc5+jFzUt+GUFNNIi3LdRlueYz0YIlmJhlZx1QYDMZL5vdWCL0fNjFQ==", "dev": true, "requires": { "esutils": "^2.0.2", - "lodash": "^4.17.10", + "lodash": "^4.17.11", "to-fast-properties": "^2.0.0" } }, "@blueprintjs/core": { - "version": "3.12.0", - "resolved": "https://registry.npmjs.org/@blueprintjs/core/-/core-3.12.0.tgz", - "integrity": "sha512-nZGVzgel8YjFye14MU39iMLHTx7iBxG/vPrl432q6pJ7PDuk0M2vJK/eH/0pWISzhTK+/t78mpt3WhUelsvkQg==", - "requires": { - "@blueprintjs/icons": "^3.5.1", - "@types/dom4": "^2.0.0", - "classnames": "^2.2", - "dom4": "^2.0.1", - "normalize.css": "^8.0.0", - "popper.js": "^1.14.1", - "react-popper": "^1.0.0", - "react-transition-group": "^2.2.1", - "resize-observer-polyfill": "^1.5.0", - "tslib": "^1.9.0" - } - }, - "@blueprintjs/icons": { - "version": "3.5.1", - "resolved": "https://registry.npmjs.org/@blueprintjs/icons/-/icons-3.5.1.tgz", - "integrity": "sha512-sognhg9kAViMCrd2sZJ9KAo0vSm7Co/cBHGLSxblPodV4cI7xyzFi1cavPx+d1D3Q72ATOM46T0N7dggVac/Tw==", + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@blueprintjs/core/-/core-1.0.1.tgz", + "integrity": "sha1-gfv9/g6gK7kLgLG+MaAec+tiWAY=", "requires": { "classnames": "^2.2", - "tslib": "^1.9.0" + "dom4": "^1.8", + "normalize.css": "4.1.1", + "pure-render-decorator": "~1.1.1", + "tether": "^1.2" } }, "@csstools/convert-colors": { @@ -285,16 +259,11 @@ "dev": true }, "@types/d3-array": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/@types/d3-array/-/d3-array-1.2.4.tgz", - "integrity": "sha512-3r1fOAAb+SGfcOGXty/LGvoP0ovMec4UtGNUyHOSzYyvSGpmt+eNMxLowol/3HryusevznSfcHZebEShXMwsZA==", + "version": "1.2.6", + "resolved": "https://registry.npmjs.org/@types/d3-array/-/d3-array-1.2.6.tgz", + "integrity": "sha512-/EcY/15X5tnwkMT2txpjiLUNJj5xHA2vGHOXI8NTYGhETK914RRLQLjNm6EpAI1D2IY5vh3CzuLODnqBAwKjPA==", "dev": true }, - "@types/dom4": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/@types/dom4/-/dom4-2.0.1.tgz", - "integrity": "sha512-kSkVAvWmMZiCYtvqjqQEwOmvKwcH+V4uiv3qPQ8pAh1Xl39xggGEo8gHUqV4waYGHezdFw0rKBR8Jt0CrQSDZA==" - }, "@types/history": { "version": "4.7.2", "resolved": "https://registry.npmjs.org/@types/history/-/history-4.7.2.tgz", @@ -302,41 +271,42 @@ "dev": true }, "@types/hjson": { - "version": 
"2.4.0", - "resolved": "https://registry.npmjs.org/@types/hjson/-/hjson-2.4.0.tgz", - "integrity": "sha512-U1/DhtNB1DeIjJjusD3MwAnX1AeAmxlTrBK+R+hvJ07VBDeNgbQI0lb8rLCMXWRH30ok+x6U31ZoEYgwztJkKA==" + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/@types/hjson/-/hjson-2.4.1.tgz", + "integrity": "sha512-yXq/C73UHM8GQc6RYJnUXUgxudr2Q9227Iawhkp03YCnfJJTc+6LJnnVLx+UR/Dvw6imO5Q3vpGNmR9IRBI0JQ==", + "dev": true }, "@types/jest": { - "version": "23.3.13", - "resolved": "https://registry.npmjs.org/@types/jest/-/jest-23.3.13.tgz", - "integrity": "sha512-ePl4l+7dLLmCucIwgQHAgjiepY++qcI6nb8eAwGNkB6OxmTe3Z9rQU3rSpomqu42PCCnlThZbOoxsf+qylJsLA==", + "version": "23.3.14", + "resolved": "https://registry.npmjs.org/@types/jest/-/jest-23.3.14.tgz", + "integrity": "sha512-Q5hTcfdudEL2yOmluA1zaSyPbzWPmJ3XfSWeP3RyoYvS9hnje1ZyagrZOuQ6+1nQC1Gw+7gap3pLNL3xL6UBug==", "dev": true }, "@types/lodash": { - "version": "4.14.120", - "resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.14.120.tgz", - "integrity": "sha512-jQ21kQ120mo+IrDs1nFNVm/AsdFxIx2+vZ347DbogHJPd/JzKNMOqU6HCYin1W6v8l5R9XSO2/e9cxmn7HAnVw==", + "version": "4.14.121", + "resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.14.121.tgz", + "integrity": "sha512-ORj7IBWj13iYufXt/VXrCNMbUuCTJfhzme5kx9U/UtcIPdJYuvPDUAlHlbNhz/8lKCLy9XGIZnGrqXOtQbPGoQ==", "dev": true }, "@types/lodash.debounce": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/@types/lodash.debounce/-/lodash.debounce-4.0.4.tgz", - "integrity": "sha512-W3oJCQXSCmOE9uIqOdrUWT08YNSXyqXed8JhxJKCe4SH40yxz5HSdtStN1ZQYkvT7S/tae8PA34Y0TO5C7Z8Ng==", + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/@types/lodash.debounce/-/lodash.debounce-4.0.5.tgz", + "integrity": "sha512-f7x1/7U8xNDCYgO0UEB9bRkYDxmOl3OAFZS5l4PvTa6gtURzy1Mxv2f7f1+WBSGgOGhl5jia+Hw027H1f+S90Q==", "dev": true, "requires": { "@types/lodash": "*" } }, "@types/mocha": { - "version": "5.2.5", - "resolved": "https://registry.npmjs.org/@types/mocha/-/mocha-5.2.5.tgz", - "integrity": "sha512-lAVp+Kj54ui/vLUFxsJTMtWvZraZxum3w3Nwkble2dNuV5VnPA+Mi2oGX9XYJAaIvZi3tn3cbjS/qcJXRb6Bww==", + "version": "5.2.6", + "resolved": "https://registry.npmjs.org/@types/mocha/-/mocha-5.2.6.tgz", + "integrity": "sha512-1axi39YdtBI7z957vdqXI4Ac25e7YihYQtJa+Clnxg1zTJEaIRbndt71O3sP4GAMgiAm0pY26/b9BrY4MR/PMw==", "dev": true }, "@types/node": { - "version": "10.12.18", - "resolved": "https://registry.npmjs.org/@types/node/-/node-10.12.18.tgz", - "integrity": "sha512-fh+pAqt4xRzPfqA6eh3Z2y6fyZavRIumvjhaCL753+TVkGKGhpPeyrJG2JftD0T9q4GF00KjefsQ+PQNDdWQaQ==", + "version": "10.12.27", + "resolved": "https://registry.npmjs.org/@types/node/-/node-10.12.27.tgz", + "integrity": "sha512-e9wgeY6gaY21on3ve0xAjgBVjGDWq/xUteK0ujsE53bUoxycMkqfnkUgMt6ffZtykZ5X12Mg3T7Pw4TRCObDKg==", "dev": true }, "@types/numeral": { @@ -346,15 +316,15 @@ "dev": true }, "@types/prop-types": { - "version": "15.5.8", - "resolved": "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.5.8.tgz", - "integrity": "sha512-3AQoUxQcQtLHsK25wtTWIoIpgYjH3vSDroZOUr7PpCHw/jLY1RB9z9E8dBT/OSmwStVgkRNvdh+ZHNiomRieaw==", + "version": "15.5.9", + "resolved": "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.5.9.tgz", + "integrity": "sha512-Nha5b+jmBI271jdTMwrHiNXM+DvThjHOfyZtMX9kj/c/LUj2xiLHsG/1L3tJ8DjAoQN48cHwUwtqBotjyXaSdQ==", "dev": true }, "@types/react": { - "version": "16.7.21", - "resolved": "https://registry.npmjs.org/@types/react/-/react-16.7.21.tgz", - "integrity": 
"sha512-8BPxwygC83LgaIjOVVLrzB4mpP2u1ih01fbfy76L3h9OgKN+fNyMVPXj/0mGpWnxImjiM/2lqb3YOeT2Ca+NYQ==", + "version": "16.8.5", + "resolved": "https://registry.npmjs.org/@types/react/-/react-16.8.5.tgz", + "integrity": "sha512-8LRySaaSJVLNZb2dbOGvGmzn88cbAfrgDpuWy+6lLgQ0OJFgHHvyuaCX4/7ikqJlpmCPf4uazJAZcfTQRdJqdQ==", "dev": true, "requires": { "@types/prop-types": "*", @@ -362,18 +332,18 @@ } }, "@types/react-dom": { - "version": "16.0.11", - "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-16.0.11.tgz", - "integrity": "sha512-x6zUx9/42B5Kl2Vl9HlopV8JF64wLpX3c+Pst9kc1HgzrsH+mkehe/zmHMQTplIrR48H2gpU7ZqurQolYu8XBA==", + "version": "16.8.2", + "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-16.8.2.tgz", + "integrity": "sha512-MX7n1wq3G/De15RGAAqnmidzhr2Y9O/ClxPxyqaNg96pGyeXUYPSvujgzEVpLo9oIP4Wn1UETl+rxTN02KEpBw==", "dev": true, "requires": { "@types/react": "*" } }, "@types/react-router": { - "version": "4.4.3", - "resolved": "https://registry.npmjs.org/@types/react-router/-/react-router-4.4.3.tgz", - "integrity": "sha512-8GmjakEBFNCLJbpg9jtDp1EDvFP0VkIPPKBpVwmB3Q+9whFoHu8rluMUXUE5SoGkEQvVOtgJzWmUsJojNpFMQQ==", + "version": "4.4.4", + "resolved": "https://registry.npmjs.org/@types/react-router/-/react-router-4.4.4.tgz", + "integrity": "sha512-TZVfpT6nvUv/lbho/nRtckEtgkhspOQr3qxrnpXixwgQRKKyg5PvDfNKc8Uend/p/Pi70614VCmC0NPAKWF+0g==", "dev": true, "requires": { "@types/history": "*", @@ -392,184 +362,188 @@ } }, "@types/react-table": { - "version": "6.7.21", - "resolved": "https://registry.npmjs.org/@types/react-table/-/react-table-6.7.21.tgz", - "integrity": "sha512-XiYCcn/CBajrj18vLA3kO79AHr5yZTCJe2kl87ZNTRxLO14y9D0IGeGZ3xLsqhfYrJSkkVzAJV8v+bQ4nuKCRQ==", + "version": "6.7.22", + "resolved": "https://registry.npmjs.org/@types/react-table/-/react-table-6.7.22.tgz", + "integrity": "sha512-gFW1QLTMmcPKUVsb2YCF9m6FwwTelVRehb8hjJRluM9KKJl5ANA0jSYZz4zN9fVFsMn11BoYO43a/3jKi2XH/w==", "dev": true, "requires": { "@types/react": "*" } }, "@webassemblyjs/ast": { - "version": "1.7.11", - "resolved": "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.7.11.tgz", - "integrity": "sha512-ZEzy4vjvTzScC+SH8RBssQUawpaInUdMTYwYYLh54/s8TuT0gBLuyUnppKsVyZEi876VmmStKsUs28UxPgdvrA==", + "version": "1.8.3", + "resolved": "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.8.3.tgz", + "integrity": "sha512-xy3m06+Iu4D32+6soz6zLnwznigXJRuFNTovBX2M4GqVqLb0dnyWLbPnpcXvUSdEN+9DVyDeaq2jyH1eIL2LZQ==", "dev": true, "requires": { - "@webassemblyjs/helper-module-context": "1.7.11", - "@webassemblyjs/helper-wasm-bytecode": "1.7.11", - "@webassemblyjs/wast-parser": "1.7.11" + "@webassemblyjs/helper-module-context": "1.8.3", + "@webassemblyjs/helper-wasm-bytecode": "1.8.3", + "@webassemblyjs/wast-parser": "1.8.3" } }, "@webassemblyjs/floating-point-hex-parser": { - "version": "1.7.11", - "resolved": "https://registry.npmjs.org/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.7.11.tgz", - "integrity": "sha512-zY8dSNyYcgzNRNT666/zOoAyImshm3ycKdoLsyDw/Bwo6+/uktb7p4xyApuef1dwEBo/U/SYQzbGBvV+nru2Xg==", + "version": "1.8.3", + "resolved": "https://registry.npmjs.org/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.8.3.tgz", + "integrity": "sha512-vq1TISG4sts4f0lDwMUM0f3kpe0on+G3YyV5P0IySHFeaLKRYZ++n2fCFfG4TcCMYkqFeTUYFxm75L3ddlk2xA==", "dev": true }, "@webassemblyjs/helper-api-error": { - "version": "1.7.11", - "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-api-error/-/helper-api-error-1.7.11.tgz", - "integrity": 
"sha512-7r1qXLmiglC+wPNkGuXCvkmalyEstKVwcueZRP2GNC2PAvxbLYwLLPr14rcdJaE4UtHxQKfFkuDFuv91ipqvXg==", + "version": "1.8.3", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-api-error/-/helper-api-error-1.8.3.tgz", + "integrity": "sha512-BmWEynI4FnZbjk8CaYZXwcv9a6gIiu+rllRRouQUo73hglanXD3AGFJE7Q4JZCoVE0p5/jeX6kf5eKa3D4JxwQ==", "dev": true }, "@webassemblyjs/helper-buffer": { - "version": "1.7.11", - "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-buffer/-/helper-buffer-1.7.11.tgz", - "integrity": "sha512-MynuervdylPPh3ix+mKZloTcL06P8tenNH3sx6s0qE8SLR6DdwnfgA7Hc9NSYeob2jrW5Vql6GVlsQzKQCa13w==", + "version": "1.8.3", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-buffer/-/helper-buffer-1.8.3.tgz", + "integrity": "sha512-iVIMhWnNHoFB94+/2l7LpswfCsXeMRnWfExKtqsZ/E2NxZyUx9nTeKK/MEMKTQNEpyfznIUX06OchBHQ+VKi/Q==", "dev": true }, "@webassemblyjs/helper-code-frame": { - "version": "1.7.11", - "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-code-frame/-/helper-code-frame-1.7.11.tgz", - "integrity": "sha512-T8ESC9KMXFTXA5urJcyor5cn6qWeZ4/zLPyWeEXZ03hj/x9weSokGNkVCdnhSabKGYWxElSdgJ+sFa9G/RdHNw==", + "version": "1.8.3", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-code-frame/-/helper-code-frame-1.8.3.tgz", + "integrity": "sha512-K1UxoJML7GKr1QXR+BG7eXqQkvu+eEeTjlSl5wUFQ6W6vaOc5OwSxTcb3oE9x/3+w4NHhrIKD4JXXCZmLdL2cg==", "dev": true, "requires": { - "@webassemblyjs/wast-printer": "1.7.11" + "@webassemblyjs/wast-printer": "1.8.3" } }, "@webassemblyjs/helper-fsm": { - "version": "1.7.11", - "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-fsm/-/helper-fsm-1.7.11.tgz", - "integrity": "sha512-nsAQWNP1+8Z6tkzdYlXT0kxfa2Z1tRTARd8wYnc/e3Zv3VydVVnaeePgqUzFrpkGUyhUUxOl5ML7f1NuT+gC0A==", + "version": "1.8.3", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-fsm/-/helper-fsm-1.8.3.tgz", + "integrity": "sha512-387zipfrGyO77/qm7/SDUiZBjQ5KGk4qkrVIyuoubmRNIiqn3g+6ijY8BhnlGqsCCQX5bYKOnttJobT5xoyviA==", "dev": true }, "@webassemblyjs/helper-module-context": { - "version": "1.7.11", - "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-module-context/-/helper-module-context-1.7.11.tgz", - "integrity": "sha512-JxfD5DX8Ygq4PvXDucq0M+sbUFA7BJAv/GGl9ITovqE+idGX+J3QSzJYz+LwQmL7fC3Rs+utvWoJxDb6pmC0qg==", - "dev": true + "version": "1.8.3", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-module-context/-/helper-module-context-1.8.3.tgz", + "integrity": "sha512-lPLFdQfaRssfnGEJit5Sk785kbBPPPK4ZS6rR5W/8hlUO/5v3F+rN8XuUcMj/Ny9iZiyKhhuinWGTUuYL4VKeQ==", + "dev": true, + "requires": { + "@webassemblyjs/ast": "1.8.3", + "mamacro": "^0.0.3" + } }, "@webassemblyjs/helper-wasm-bytecode": { - "version": "1.7.11", - "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.7.11.tgz", - "integrity": "sha512-cMXeVS9rhoXsI9LLL4tJxBgVD/KMOKXuFqYb5oCJ/opScWpkCMEz9EJtkonaNcnLv2R3K5jIeS4TRj/drde1JQ==", + "version": "1.8.3", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.8.3.tgz", + "integrity": "sha512-R1nJW7bjyJLjsJQR5t3K/9LJ0QWuZezl8fGa49DZq4IVaejgvkbNlKEQxLYTC579zgT4IIIVHb5JA59uBPHXyw==", "dev": true }, "@webassemblyjs/helper-wasm-section": { - "version": "1.7.11", - "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.7.11.tgz", - "integrity": "sha512-8ZRY5iZbZdtNFE5UFunB8mmBEAbSI3guwbrsCl4fWdfRiAcvqQpeqd5KHhSWLL5wuxo53zcaGZDBU64qgn4I4Q==", + "version": "1.8.3", + 
"resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.8.3.tgz", + "integrity": "sha512-P6F7D61SJY73Yz+fs49Q3+OzlYAZP86OfSpaSY448KzUy65NdfzDmo2NPVte+Rw4562MxEAacvq/mnDuvRWOcg==", "dev": true, "requires": { - "@webassemblyjs/ast": "1.7.11", - "@webassemblyjs/helper-buffer": "1.7.11", - "@webassemblyjs/helper-wasm-bytecode": "1.7.11", - "@webassemblyjs/wasm-gen": "1.7.11" + "@webassemblyjs/ast": "1.8.3", + "@webassemblyjs/helper-buffer": "1.8.3", + "@webassemblyjs/helper-wasm-bytecode": "1.8.3", + "@webassemblyjs/wasm-gen": "1.8.3" } }, "@webassemblyjs/ieee754": { - "version": "1.7.11", - "resolved": "https://registry.npmjs.org/@webassemblyjs/ieee754/-/ieee754-1.7.11.tgz", - "integrity": "sha512-Mmqx/cS68K1tSrvRLtaV/Lp3NZWzXtOHUW2IvDvl2sihAwJh4ACE0eL6A8FvMyDG9abes3saB6dMimLOs+HMoQ==", + "version": "1.8.3", + "resolved": "https://registry.npmjs.org/@webassemblyjs/ieee754/-/ieee754-1.8.3.tgz", + "integrity": "sha512-UD4HuLU99hjIvWz1pD68b52qsepWQlYCxDYVFJQfHh3BHyeAyAlBJ+QzLR1nnS5J6hAzjki3I3AoJeobNNSZlg==", "dev": true, "requires": { "@xtuc/ieee754": "^1.2.0" } }, "@webassemblyjs/leb128": { - "version": "1.7.11", - "resolved": "https://registry.npmjs.org/@webassemblyjs/leb128/-/leb128-1.7.11.tgz", - "integrity": "sha512-vuGmgZjjp3zjcerQg+JA+tGOncOnJLWVkt8Aze5eWQLwTQGNgVLcyOTqgSCxWTR4J42ijHbBxnuRaL1Rv7XMdw==", + "version": "1.8.3", + "resolved": "https://registry.npmjs.org/@webassemblyjs/leb128/-/leb128-1.8.3.tgz", + "integrity": "sha512-XXd3s1BmkC1gpGABuCRLqCGOD6D2L+Ma2BpwpjrQEHeQATKWAQtxAyU9Z14/z8Ryx6IG+L4/NDkIGHrccEhRUg==", "dev": true, "requires": { - "@xtuc/long": "4.2.1" + "@xtuc/long": "4.2.2" } }, "@webassemblyjs/utf8": { - "version": "1.7.11", - "resolved": "https://registry.npmjs.org/@webassemblyjs/utf8/-/utf8-1.7.11.tgz", - "integrity": "sha512-C6GFkc7aErQIAH+BMrIdVSmW+6HSe20wg57HEC1uqJP8E/xpMjXqQUxkQw07MhNDSDcGpxI9G5JSNOQCqJk4sA==", + "version": "1.8.3", + "resolved": "https://registry.npmjs.org/@webassemblyjs/utf8/-/utf8-1.8.3.tgz", + "integrity": "sha512-Wv/WH9Zo5h5ZMyfCNpUrjFsLZ3X1amdfEuwdb7MLdG3cPAjRS6yc6ElULlpjLiiBTuzvmLhr3ENsuGyJ3wyCgg==", "dev": true }, "@webassemblyjs/wasm-edit": { - "version": "1.7.11", - "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-edit/-/wasm-edit-1.7.11.tgz", - "integrity": "sha512-FUd97guNGsCZQgeTPKdgxJhBXkUbMTY6hFPf2Y4OedXd48H97J+sOY2Ltaq6WGVpIH8o/TGOVNiVz/SbpEMJGg==", + "version": "1.8.3", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-edit/-/wasm-edit-1.8.3.tgz", + "integrity": "sha512-nB19eUx3Yhi1Vvv3yev5r+bqQixZprMtaoCs1brg9Efyl8Hto3tGaUoZ0Yb4Umn/gQCyoEGFfUxPLp1/8+Jvnw==", "dev": true, "requires": { - "@webassemblyjs/ast": "1.7.11", - "@webassemblyjs/helper-buffer": "1.7.11", - "@webassemblyjs/helper-wasm-bytecode": "1.7.11", - "@webassemblyjs/helper-wasm-section": "1.7.11", - "@webassemblyjs/wasm-gen": "1.7.11", - "@webassemblyjs/wasm-opt": "1.7.11", - "@webassemblyjs/wasm-parser": "1.7.11", - "@webassemblyjs/wast-printer": "1.7.11" + "@webassemblyjs/ast": "1.8.3", + "@webassemblyjs/helper-buffer": "1.8.3", + "@webassemblyjs/helper-wasm-bytecode": "1.8.3", + "@webassemblyjs/helper-wasm-section": "1.8.3", + "@webassemblyjs/wasm-gen": "1.8.3", + "@webassemblyjs/wasm-opt": "1.8.3", + "@webassemblyjs/wasm-parser": "1.8.3", + "@webassemblyjs/wast-printer": "1.8.3" } }, "@webassemblyjs/wasm-gen": { - "version": "1.7.11", - "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-gen/-/wasm-gen-1.7.11.tgz", - "integrity": 
"sha512-U/KDYp7fgAZX5KPfq4NOupK/BmhDc5Kjy2GIqstMhvvdJRcER/kUsMThpWeRP8BMn4LXaKhSTggIJPOeYHwISA==", + "version": "1.8.3", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-gen/-/wasm-gen-1.8.3.tgz", + "integrity": "sha512-sDNmu2nLBJZ/huSzlJvd9IK8B1EjCsOl7VeMV9VJPmxKYgTJ47lbkSP+KAXMgZWGcArxmcrznqm7FrAPQ7vVGg==", "dev": true, "requires": { - "@webassemblyjs/ast": "1.7.11", - "@webassemblyjs/helper-wasm-bytecode": "1.7.11", - "@webassemblyjs/ieee754": "1.7.11", - "@webassemblyjs/leb128": "1.7.11", - "@webassemblyjs/utf8": "1.7.11" + "@webassemblyjs/ast": "1.8.3", + "@webassemblyjs/helper-wasm-bytecode": "1.8.3", + "@webassemblyjs/ieee754": "1.8.3", + "@webassemblyjs/leb128": "1.8.3", + "@webassemblyjs/utf8": "1.8.3" } }, "@webassemblyjs/wasm-opt": { - "version": "1.7.11", - "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-opt/-/wasm-opt-1.7.11.tgz", - "integrity": "sha512-XynkOwQyiRidh0GLua7SkeHvAPXQV/RxsUeERILmAInZegApOUAIJfRuPYe2F7RcjOC9tW3Cb9juPvAC/sCqvg==", + "version": "1.8.3", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-opt/-/wasm-opt-1.8.3.tgz", + "integrity": "sha512-j8lmQVFR+FR4/645VNgV4R/Jz8i50eaPAj93GZyd3EIJondVshE/D9pivpSDIXyaZt+IkCodlzOoZUE4LnQbeA==", "dev": true, "requires": { - "@webassemblyjs/ast": "1.7.11", - "@webassemblyjs/helper-buffer": "1.7.11", - "@webassemblyjs/wasm-gen": "1.7.11", - "@webassemblyjs/wasm-parser": "1.7.11" + "@webassemblyjs/ast": "1.8.3", + "@webassemblyjs/helper-buffer": "1.8.3", + "@webassemblyjs/wasm-gen": "1.8.3", + "@webassemblyjs/wasm-parser": "1.8.3" } }, "@webassemblyjs/wasm-parser": { - "version": "1.7.11", - "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-parser/-/wasm-parser-1.7.11.tgz", - "integrity": "sha512-6lmXRTrrZjYD8Ng8xRyvyXQJYUQKYSXhJqXOBLw24rdiXsHAOlvw5PhesjdcaMadU/pyPQOJ5dHreMjBxwnQKg==", + "version": "1.8.3", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-parser/-/wasm-parser-1.8.3.tgz", + "integrity": "sha512-NBI3SNNtRoy4T/KBsRZCAWUzE9lI94RH2nneLwa1KKIrt/2zzcTavWg6oY05ArCbb/PZDk3OUi63CD1RYtN65w==", "dev": true, "requires": { - "@webassemblyjs/ast": "1.7.11", - "@webassemblyjs/helper-api-error": "1.7.11", - "@webassemblyjs/helper-wasm-bytecode": "1.7.11", - "@webassemblyjs/ieee754": "1.7.11", - "@webassemblyjs/leb128": "1.7.11", - "@webassemblyjs/utf8": "1.7.11" + "@webassemblyjs/ast": "1.8.3", + "@webassemblyjs/helper-api-error": "1.8.3", + "@webassemblyjs/helper-wasm-bytecode": "1.8.3", + "@webassemblyjs/ieee754": "1.8.3", + "@webassemblyjs/leb128": "1.8.3", + "@webassemblyjs/utf8": "1.8.3" } }, "@webassemblyjs/wast-parser": { - "version": "1.7.11", - "resolved": "https://registry.npmjs.org/@webassemblyjs/wast-parser/-/wast-parser-1.7.11.tgz", - "integrity": "sha512-lEyVCg2np15tS+dm7+JJTNhNWq9yTZvi3qEhAIIOaofcYlUp0UR5/tVqOwa/gXYr3gjwSZqw+/lS9dscyLelbQ==", + "version": "1.8.3", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wast-parser/-/wast-parser-1.8.3.tgz", + "integrity": "sha512-gZPst4CNcmGtKC1eYQmgCx6gwQvxk4h/nPjfPBbRoD+Raw3Hs+BS3yhrfgyRKtlYP+BJ8LcY9iFODEQofl2qbg==", "dev": true, "requires": { - "@webassemblyjs/ast": "1.7.11", - "@webassemblyjs/floating-point-hex-parser": "1.7.11", - "@webassemblyjs/helper-api-error": "1.7.11", - "@webassemblyjs/helper-code-frame": "1.7.11", - "@webassemblyjs/helper-fsm": "1.7.11", - "@xtuc/long": "4.2.1" + "@webassemblyjs/ast": "1.8.3", + "@webassemblyjs/floating-point-hex-parser": "1.8.3", + "@webassemblyjs/helper-api-error": "1.8.3", + "@webassemblyjs/helper-code-frame": "1.8.3", + "@webassemblyjs/helper-fsm": 
"1.8.3", + "@xtuc/long": "4.2.2" } }, "@webassemblyjs/wast-printer": { - "version": "1.7.11", - "resolved": "https://registry.npmjs.org/@webassemblyjs/wast-printer/-/wast-printer-1.7.11.tgz", - "integrity": "sha512-m5vkAsuJ32QpkdkDOUPGSltrg8Cuk3KBx4YrmAGQwCZPRdUHXxG4phIOuuycLemHFr74sWL9Wthqss4fzdzSwg==", + "version": "1.8.3", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wast-printer/-/wast-printer-1.8.3.tgz", + "integrity": "sha512-DTA6kpXuHK4PHu16yAD9QVuT1WZQRT7079oIFFmFSjqjLWGXS909I/7kiLTn931mcj7wGsaUNungjwNQ2lGQ3Q==", "dev": true, "requires": { - "@webassemblyjs/ast": "1.7.11", - "@webassemblyjs/wast-parser": "1.7.11", - "@xtuc/long": "4.2.1" + "@webassemblyjs/ast": "1.8.3", + "@webassemblyjs/wast-parser": "1.8.3", + "@xtuc/long": "4.2.2" } }, "@xtuc/ieee754": { @@ -579,9 +553,9 @@ "dev": true }, "@xtuc/long": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/@xtuc/long/-/long-4.2.1.tgz", - "integrity": "sha512-FZdkNBDqBRHKQ2MEbSC17xnPFOhZxeJ2YGSfr2BKf3sujG49Qe3bB+rGCwQfIaA7WHnGeGkSijX4FuBCdrzW/g==", + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@xtuc/long/-/long-4.2.2.tgz", + "integrity": "sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ==", "dev": true }, "abab": { @@ -629,9 +603,9 @@ }, "dependencies": { "acorn": { - "version": "6.0.5", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-6.0.5.tgz", - "integrity": "sha512-i33Zgp3XWtmZBMNvCr4azvOFeWVw1Rk6p3hfi3LUDvIFraOMywb1kAtrbi+med14m4Xfpqm3zRZMT+c0FNE7kg==", + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-6.1.0.tgz", + "integrity": "sha512-MW/FjM+IvU9CgBzjO3UIPCE2pyEwUsoFl+VGdczOPEdxfGFjuKny/gN54mOuX7Qxmb9Rg9MCn2oKiSUeW+pjrw==", "dev": true } } @@ -643,9 +617,9 @@ "dev": true }, "ajv": { - "version": "6.7.0", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.7.0.tgz", - "integrity": "sha512-RZXPviBTtfmtka9n9sy1N5M5b82CbxWIR6HIis4s3WQTXDJamc/0gpCWNGz6EWdWp4DOfjzJfhz/AS9zVPjjWg==", + "version": "6.9.2", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.9.2.tgz", + "integrity": "sha512-4UFy0/LgDo7Oa/+wOAlj44tp9K78u38E5/359eSrqEp1Z5PdVfimCcs7SluXMP755RUQu6d2b4AvF0R1C9RZjg==", "dev": true, "requires": { "fast-deep-equal": "^2.0.1", @@ -661,9 +635,9 @@ "dev": true }, "ajv-keywords": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-3.3.0.tgz", - "integrity": "sha512-CMzN9S62ZOO4sA/mJZIO4S++ZM7KFWzH3PPWkveLhy4OZ9i1/VatgwWMD46w/XbGCBy7Ye0gCk+Za6mmyfKK7g==", + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-3.4.0.tgz", + "integrity": "sha512-aUjdRFISbuFOl0EIZc+9e4FfZp0bDZgAdOOf30bJmw8VM9v84SHyVyxDfbWxpGYbdZD/9XoKxfHVNmxPkhwyGw==", "dev": true }, "amdefine": { @@ -679,9 +653,9 @@ "dev": true }, "ansi-escapes": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-3.1.0.tgz", - "integrity": "sha512-UgAb8H9D41AQnu/PbWlCofQVcnV4Gs2bBJi9eZPxfU/hgglFh3SMDMENRIqdr7H6XFnXdoknctFByVsCOotTVw==", + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-3.2.0.tgz", + "integrity": "sha512-cBhpre4ma+U0T1oM5fXg7Dy1Jw7zzwv7lt/GoCpr+hDQJoYnKVPLL4dCvSEFMmQurOQvSrwT7SL/DAlhBI97RQ==", "dev": true }, "ansi-html": { @@ -897,12 +871,12 @@ "dev": true }, "async": { - "version": "2.6.1", - "resolved": "https://registry.npmjs.org/async/-/async-2.6.1.tgz", - "integrity": "sha512-fNEiL2+AZt6AlAw/29Cr0UDe4sRAHCpEHh54WMz+Bb7QfNcFw4h3loofyJpLeQs4Yx7yuqu/2dLgM5hKOs6HlQ==", + 
"version": "2.6.2", + "resolved": "https://registry.npmjs.org/async/-/async-2.6.2.tgz", + "integrity": "sha512-H1qVYh1MYhEEFLsP97cVKqCGo7KfCyTt6uEWqsTBr9SO84oK9Uwbyd/yCW+6rKJLHksBNUVWZDAjfS+Ccx0Bbg==", "dev": true, "requires": { - "lodash": "^4.17.10" + "lodash": "^4.17.11" } }, "async-each": { @@ -936,16 +910,16 @@ "dev": true }, "autoprefixer": { - "version": "9.4.6", - "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-9.4.6.tgz", - "integrity": "sha512-Yp51mevbOEdxDUy5WjiKtpQaecqYq9OqZSL04rSoCiry7Tc5I9FEyo3bfxiTJc1DfHeKwSFCUYbBAiOQ2VGfiw==", + "version": "9.4.9", + "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-9.4.9.tgz", + "integrity": "sha512-OyUl7KvbGBoFQbGQu51hMywz1aaVeud/6uX8r1R1DNcqFvqGUUy6+BDHnAZE8s5t5JyEObaSw+O1DpAdjAmLuw==", "dev": true, "requires": { - "browserslist": "^4.4.1", - "caniuse-lite": "^1.0.30000929", + "browserslist": "^4.4.2", + "caniuse-lite": "^1.0.30000939", "normalize-range": "^0.1.2", "num2fraction": "^1.2.2", - "postcss": "^7.0.13", + "postcss": "^7.0.14", "postcss-value-parser": "^3.3.1" } }, @@ -971,19 +945,21 @@ } }, "babel-jest": { - "version": "24.0.0", - "resolved": "https://registry.npmjs.org/babel-jest/-/babel-jest-24.0.0.tgz", - "integrity": "sha512-YGKRbZUjoRmNIAyG7x4wYxUyHvHPFpYXj6Mx1A5cslhaQOUgP/+LF3wtFgMuOQkIpjbVNBufmOnVY0QVwB5v9Q==", + "version": "24.1.0", + "resolved": "https://registry.npmjs.org/babel-jest/-/babel-jest-24.1.0.tgz", + "integrity": "sha512-MLcagnVrO9ybQGLEfZUqnOzv36iQzU7Bj4elm39vCukumLVSfoX+tRy3/jW7lUKc7XdpRmB/jech6L/UCsSZjw==", "dev": true, "requires": { "babel-plugin-istanbul": "^5.1.0", - "babel-preset-jest": "^24.0.0" + "babel-preset-jest": "^24.1.0", + "chalk": "^2.4.2", + "slash": "^2.0.0" } }, "babel-plugin-istanbul": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/babel-plugin-istanbul/-/babel-plugin-istanbul-5.1.0.tgz", - "integrity": "sha512-CLoXPRSUWiR8yao8bShqZUIC6qLfZVVY3X1wj+QPNXu0wfmrRRfarh1LYy+dYMVI+bDj0ghy3tuqFFRFZmL1Nw==", + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/babel-plugin-istanbul/-/babel-plugin-istanbul-5.1.1.tgz", + "integrity": "sha512-RNNVv2lsHAXJQsEJ5jonQwrJVWK8AcZpG1oxhnjCUaAjL7xahYLANhPUZbzEQHjKy1NMYUwn+0NPKQc8iSY4xQ==", "dev": true, "requires": { "find-up": "^3.0.0", @@ -992,19 +968,19 @@ } }, "babel-plugin-jest-hoist": { - "version": "24.0.0", - "resolved": "https://registry.npmjs.org/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-24.0.0.tgz", - "integrity": "sha512-ipefE7YWNyRNVaV/MonUb/I5nef53ZRFR74P9meMGmJxqt8s1BJmfhw11YeIMbcjXN4fxtWUaskZZe8yreXE1Q==", + "version": "24.1.0", + "resolved": "https://registry.npmjs.org/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-24.1.0.tgz", + "integrity": "sha512-gljYrZz8w1b6fJzKcsfKsipSru2DU2DmQ39aB6nV3xQ0DDv3zpIzKGortA5gknrhNnPN8DweaEgrnZdmbGmhnw==", "dev": true }, "babel-preset-jest": { - "version": "24.0.0", - "resolved": "https://registry.npmjs.org/babel-preset-jest/-/babel-preset-jest-24.0.0.tgz", - "integrity": "sha512-ECMMOLvNDCmsn3geBa3JkwzylcfpThMpAdfreONQm8EmXcs4tXUpXZDQPxiIMg7nMobTuAC2zDGIKrbrBXW2Vg==", + "version": "24.1.0", + "resolved": "https://registry.npmjs.org/babel-preset-jest/-/babel-preset-jest-24.1.0.tgz", + "integrity": "sha512-FfNLDxFWsNX9lUmtwY7NheGlANnagvxq8LZdl5PKnVG3umP+S/g0XbVBfwtA4Ai3Ri/IMkWabBz3Tyk9wdspcw==", "dev": true, "requires": { "@babel/plugin-syntax-object-rest-spread": "^7.0.0", - "babel-plugin-jest-hoist": "^24.0.0" + "babel-plugin-jest-hoist": "^24.1.0" } }, "balanced-match": { @@ -1096,9 +1072,9 @@ "dev": true }, 
"binary-extensions": { - "version": "1.12.0", - "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-1.12.0.tgz", - "integrity": "sha512-DYWGk01lDcxeS/K9IHPGWfT8PsJmbXRtRd2Sx72Tnb8pcYZQFF1oSDb8hJtS1vhp212q1Rzi5dUf9+nq0o9UIg==", + "version": "1.13.0", + "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-1.13.0.tgz", + "integrity": "sha512-EgmjVLMn22z7eGGv3kcnHwSnJXmFHjISTY9E/S5lIcTD3Oxw05QTcBLNkJFzcb3cNueUdF/IN4U+d78V0zO8Hw==", "dev": true }, "block-stream": { @@ -1157,6 +1133,12 @@ "requires": { "safer-buffer": ">= 2.1.2 < 3" } + }, + "ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=", + "dev": true } } }, @@ -1174,6 +1156,11 @@ "multicast-dns-service-types": "^1.1.0" } }, + "brace": { + "version": "0.11.1", + "resolved": "https://registry.npmjs.org/brace/-/brace-0.11.1.tgz", + "integrity": "sha1-SJb8ydVE7vRfS7dmDbMg07N5/lg=" + }, "brace-expansion": { "version": "1.1.11", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", @@ -1320,14 +1307,14 @@ } }, "browserslist": { - "version": "4.4.1", - "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.4.1.tgz", - "integrity": "sha512-pEBxEXg7JwaakBXjATYw/D1YZh4QUSCX/Mnd/wnqSRPPSi1U39iDhDoKGoBUcraKdxDlrYqJxSI5nNvD+dWP2A==", + "version": "4.4.2", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.4.2.tgz", + "integrity": "sha512-ISS/AIAiHERJ3d45Fz0AVYKkgcy+F/eJHzKEvv1j0wwKGKD9T3BrwKr/5g45L+Y4XIK5PlTqefHciRFcfE1Jxg==", "dev": true, "requires": { - "caniuse-lite": "^1.0.30000929", - "electron-to-chromium": "^1.3.103", - "node-releases": "^1.1.3" + "caniuse-lite": "^1.0.30000939", + "electron-to-chromium": "^1.3.113", + "node-releases": "^1.1.8" } }, "bs-logger": { @@ -1385,12 +1372,6 @@ "integrity": "sha1-JuYe0UIvtw3ULm42cp7VHYVf6Nk=", "dev": true }, - "builtin-modules": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/builtin-modules/-/builtin-modules-1.1.1.tgz", - "integrity": "sha1-Jw8HbFpywC9bZaR9+Uxf46J4iS8=", - "dev": true - }, "builtin-status-codes": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/builtin-status-codes/-/builtin-status-codes-3.0.0.tgz", @@ -1496,9 +1477,9 @@ } }, "caniuse-lite": { - "version": "1.0.30000932", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30000932.tgz", - "integrity": "sha512-4bghJFItvzz8m0T3lLZbacmEY9X1Z2AtIzTr7s7byqZIOumASfr4ynDx7rtm0J85nDmx8vsgR6vnaSoeU8Oh0A==", + "version": "1.0.30000939", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30000939.tgz", + "integrity": "sha512-oXB23ImDJOgQpGjRv1tCtzAvJr4/OvrHi5SO2vUgB0g0xpdZZoA/BxfImiWfdwoYdUTtQrPsXsvYU/dmCSM8gg==", "dev": true }, "capture-exit": { @@ -1516,6 +1497,11 @@ "integrity": "sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw=", "dev": true }, + "chain-function": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/chain-function/-/chain-function-1.0.1.tgz", + "integrity": "sha512-SxltgMwL9uCko5/ZCLiyG2B7R9fY4pDZUw7hJ4MhirdjBLosoDqkWABi3XMucddHdLiFJMb7PD2MZifZriuMTg==" + }, "chalk": { "version": "2.4.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", @@ -1539,24 +1525,31 @@ } }, "chokidar": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-2.0.4.tgz", - "integrity": "sha512-z9n7yt9rOvIJrMhvDtDictKrkFHeihkNl6uWMmZlmL6tJtX9Cs+87oK+teBx+JIgzvbX3yZHT3eF8vpbDxHJXQ==", + "version": "2.1.2", + "resolved": 
"https://registry.npmjs.org/chokidar/-/chokidar-2.1.2.tgz", + "integrity": "sha512-IwXUx0FXc5ibYmPC2XeEj5mpXoV66sR+t3jqu2NS2GYwCktt3KF1/Qqjws/NkegajBA4RbZ5+DDwlOiJsxDHEg==", "dev": true, "requires": { "anymatch": "^2.0.0", - "async-each": "^1.0.0", - "braces": "^2.3.0", - "fsevents": "^1.2.2", + "async-each": "^1.0.1", + "braces": "^2.3.2", + "fsevents": "^1.2.7", "glob-parent": "^3.1.0", - "inherits": "^2.0.1", + "inherits": "^2.0.3", "is-binary-path": "^1.0.0", "is-glob": "^4.0.0", - "lodash.debounce": "^4.0.8", - "normalize-path": "^2.1.1", + "normalize-path": "^3.0.0", "path-is-absolute": "^1.0.0", - "readdirp": "^2.0.0", - "upath": "^1.0.5" + "readdirp": "^2.2.1", + "upath": "^1.1.0" + }, + "dependencies": { + "normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "dev": true + } } }, "chownr": { @@ -1729,12 +1722,12 @@ "dev": true }, "compressible": { - "version": "2.0.15", - "resolved": "https://registry.npmjs.org/compressible/-/compressible-2.0.15.tgz", - "integrity": "sha512-4aE67DL33dSW9gw4CI2H/yTxqHLNcxp0yS6jB+4h+wr3e43+1z7vm0HU9qXOH8j+qjKuL8+UtkOxYQSMq60Ylw==", + "version": "2.0.16", + "resolved": "https://registry.npmjs.org/compressible/-/compressible-2.0.16.tgz", + "integrity": "sha512-JQfEOdnI7dASwCuSPWIeVYwc/zMsu/+tRhoUvEfXz2gxOA2DNjmG5vhtFdBlhWPPGo+RdT9S3tgc/uH5qgDiiA==", "dev": true, "requires": { - "mime-db": ">= 1.36.0 < 2" + "mime-db": ">= 1.38.0 < 2" } }, "compression": { @@ -1760,6 +1753,12 @@ "requires": { "ms": "2.0.0" } + }, + "ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=", + "dev": true } } }, @@ -1921,15 +1920,6 @@ "sha.js": "^2.4.8" } }, - "create-react-context": { - "version": "0.2.2", - "resolved": "https://registry.npmjs.org/create-react-context/-/create-react-context-0.2.2.tgz", - "integrity": "sha512-KkpaLARMhsTsgp0d2NA/R94F/eDLbhXERdIq3LvX2biCAXcDvHYoOqHfWCHf1+OLj+HKBotLG3KqaOOf+C1C+A==", - "requires": { - "fbjs": "^0.8.0", - "gud": "^1.0.0" - } - }, "cross-spawn": { "version": "6.0.5", "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.5.tgz", @@ -2038,24 +2028,24 @@ "dev": true }, "cssom": { - "version": "0.3.4", - "resolved": "https://registry.npmjs.org/cssom/-/cssom-0.3.4.tgz", - "integrity": "sha512-+7prCSORpXNeR4/fUP3rL+TzqtiFfhMvTd7uEqMdgPvLPt4+uzFUeufx5RHjGTACCargg/DiEt/moMQmvnfkog==", + "version": "0.3.6", + "resolved": "https://registry.npmjs.org/cssom/-/cssom-0.3.6.tgz", + "integrity": "sha512-DtUeseGk9/GBW0hl0vVPpU22iHL6YB5BUX7ml1hB+GMpo0NX5G4voX3kdWiMSEguFtcW3Vh3djqNF4aIe6ne0A==", "dev": true }, "cssstyle": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/cssstyle/-/cssstyle-1.1.1.tgz", - "integrity": "sha512-364AI1l/M5TYcFH83JnOH/pSqgaNnKmYgKrm0didZMGKWjQB60dymwWy1rKUgL3J1ffdq9xVi2yGLHdSjjSNog==", + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/cssstyle/-/cssstyle-1.2.1.tgz", + "integrity": "sha512-7DYm8qe+gPx/h77QlCyFmX80+fGaE/6A/Ekl0zaszYOubvySO2saYFdQ78P29D0UsULxFKCetDGNaNRUdSF+2A==", "dev": true, "requires": { "cssom": "0.3.x" } }, "csstype": { - "version": "2.6.1", - "resolved": "https://registry.npmjs.org/csstype/-/csstype-2.6.1.tgz", - "integrity": "sha512-wv7IRqCGsL7WGKB8gPvrl+++HlFM9kxAM6jL1EXNPNTshEJYilMkbfS2SnuHha77uosp/YVK0wAp2jmlBzn1tg==", + "version": "2.6.2", + "resolved": 
"https://registry.npmjs.org/csstype/-/csstype-2.6.2.tgz", + "integrity": "sha512-Rl7PvTae0pflc1YtxtKbiSqq20Ts6vpIYOD5WBafl4y123DyHUeLrRdQP66sQW8/6gmX8jrYJLXwNeMqYVJcow==", "dev": true }, "currently-unhandled": { @@ -2118,11 +2108,11 @@ "dev": true }, "debug": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/debug/-/debug-3.1.0.tgz", - "integrity": "sha512-OX8XqP7/1a9cqkxYw2yXss15f26NKWBpDXQd0/uK/KPqdQhxbPa994hnzjcE2VqQpDslf55723cKPUOGSmMY3g==", + "version": "3.2.6", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.6.tgz", + "integrity": "sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ==", "requires": { - "ms": "2.0.0" + "ms": "^2.1.1" } }, "decamelize": { @@ -2150,36 +2140,13 @@ "dev": true }, "default-gateway": { - "version": "2.7.2", - "resolved": "https://registry.npmjs.org/default-gateway/-/default-gateway-2.7.2.tgz", - "integrity": "sha512-lAc4i9QJR0YHSDFdzeBQKfZ1SRDG3hsJNEkrpcZa8QhBfidLAilT60BDEIVUUGqosFp425KOgB3uYqcnQrWafQ==", + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/default-gateway/-/default-gateway-4.1.2.tgz", + "integrity": "sha512-xhJUAp3u02JsBGovj0V6B6uYhKCUOmiNc8xGmReUwGu77NmvcpxPVB0pCielxMFumO7CmXBG02XjM8HB97k8Hw==", "dev": true, "requires": { - "execa": "^0.10.0", + "execa": "^1.0.0", "ip-regex": "^2.1.0" - }, - "dependencies": { - "execa": { - "version": "0.10.0", - "resolved": "https://registry.npmjs.org/execa/-/execa-0.10.0.tgz", - "integrity": "sha512-7XOMnz8Ynx1gGo/3hyV9loYNPWM94jG3+3T3Y8tsfSstFmETmENCMU/A/zj8Lyaj1lkgEepKepvd6240tBRvlw==", - "dev": true, - "requires": { - "cross-spawn": "^6.0.0", - "get-stream": "^3.0.0", - "is-stream": "^1.1.0", - "npm-run-path": "^2.0.0", - "p-finally": "^1.0.0", - "signal-exit": "^3.0.0", - "strip-eof": "^1.0.0" - } - }, - "get-stream": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-3.0.0.tgz", - "integrity": "sha1-jpQ9E1jcN1VQVOy+LtsFqhdO3hQ=", - "dev": true - } } }, "default-require-extensions": { @@ -2341,6 +2308,11 @@ "integrity": "sha512-A46qtFgd+g7pDZinpnwiRJtxbC1hpgf0uzP3iG89scHk0AUC7A1TGxf5OiiOUv/JMZR8GOt8hL900hV0bOy5xA==", "dev": true }, + "diff-match-patch": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/diff-match-patch/-/diff-match-patch-1.0.4.tgz", + "integrity": "sha512-Uv3SW8bmH9nAtHKaKSanOQmj2DnlH65fUpcrMdfdaOxUG02QQ4YGZ8AE7kKOMisF7UqvOlGKVYWRvezdncW9lg==" + }, "diff-sequences": { "version": "24.0.0", "resolved": "https://registry.npmjs.org/diff-sequences/-/diff-sequences-24.0.0.tgz", @@ -2359,12 +2331,11 @@ } }, "dir-glob": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-2.0.0.tgz", - "integrity": "sha512-37qirFDz8cA5fimp9feo43fSuRo2gHwaIn6dXL8Ber1dGwUosDrGZeCCXq57WnIqE4aQ+u3eQZzsk1yOzhdwag==", + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-2.2.2.tgz", + "integrity": "sha512-f9LBi5QWzIW3I6e//uxZoLBlUt9kcp66qo0sSCxL6YZKc75R1c4MFCoe/LaZiBGmgujvQdxc5Bn3QhfyvK5Hsw==", "dev": true, "requires": { - "arrify": "^1.0.1", "path-type": "^3.0.0" } }, @@ -2402,9 +2373,9 @@ } }, "dom4": { - "version": "2.1.4", - "resolved": "https://registry.npmjs.org/dom4/-/dom4-2.1.4.tgz", - "integrity": "sha512-7NNKNViuZYu4GaZMUsSbsV6MFsT/ZpYNKP1NT4YIUgAvwPR8ODuvQEZZ7vRC1u5Y4dHwQ7je/UNOlRRWkaCyvw==" + "version": "1.8.5", + "resolved": "https://registry.npmjs.org/dom4/-/dom4-1.8.5.tgz", + "integrity": "sha512-ehHzOGGkVQOwU9HyZ99gHwkx4ybrRl/P1vJM7EH1nS9XsgHwO+J0KwCnVQrn8iQvpstGwFrtrX7aSNQ43QuK4A==" }, 
"domain-browser": { "version": "1.2.0", @@ -2427,9 +2398,9 @@ "integrity": "sha512-0sYnfUHHMoajaud/i5BHKA12bUxiWEHJ9rxGqVEppFxsEcxef0TZQ5J59lU+UniEBcz/sG5fTESRyS7cOm3tSQ==" }, "duplexify": { - "version": "3.6.1", - "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-3.6.1.tgz", - "integrity": "sha512-vM58DwdnKmty+FSPzT14K9JXb90H+j5emaR4KYbr2KTIz00WHGbWOe5ghQTx233ZCLZtrGDALzKwcjEtSt35mA==", + "version": "3.7.1", + "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-3.7.1.tgz", + "integrity": "sha512-07z8uv2wMyS51kKhD1KsdXJg5WQ6t93RneqRxUHnskXVtlYYkLqM0gqStQZ3pj073g687jPCHrqNfCzawLYh5g==", "dev": true, "requires": { "end-of-stream": "^1.0.0", @@ -2455,9 +2426,9 @@ "dev": true }, "electron-to-chromium": { - "version": "1.3.108", - "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.3.108.tgz", - "integrity": "sha512-/QI4hMpAh48a1Sea6PALGv+kuVne9A2EWGd8HrWHMdYhIzGtbhVVHh6heL5fAzGaDnZuPyrlWJRl8WPm4RyiQQ==", + "version": "1.3.113", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.3.113.tgz", + "integrity": "sha512-De+lPAxEcpxvqPTyZAXELNpRZXABRxf+uL/rSykstQhzj/B0l1150G/ExIIxKc16lI89Hgz81J0BHAcbTqK49g==", "dev": true }, "elliptic": { @@ -2590,9 +2561,9 @@ "dev": true }, "escodegen": { - "version": "1.11.0", - "resolved": "https://registry.npmjs.org/escodegen/-/escodegen-1.11.0.tgz", - "integrity": "sha512-IeMV45ReixHS53K/OmfKAIztN/igDHzTJUhZM3k1jMhIZWjk45SMwAtBsEXiJp3vSPmTcu6CXn7mDvFHRN66fw==", + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/escodegen/-/escodegen-1.11.1.tgz", + "integrity": "sha512-JwiqFD9KdGVVpeuRa68yU3zZnBEOcPs0nKW7wZzXky8Z7tffdYUHbe11bPCV5jYlK6DVdKLWLm0f5I/QlL0Kmw==", "dev": true, "requires": { "esprima": "^3.1.3", @@ -2755,6 +2726,12 @@ "requires": { "is-extendable": "^0.1.0" } + }, + "ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=", + "dev": true } } }, @@ -2768,9 +2745,9 @@ } }, "expect": { - "version": "24.0.0", - "resolved": "https://registry.npmjs.org/expect/-/expect-24.0.0.tgz", - "integrity": "sha512-qDHRU4lGsme0xjg8dXp/RQhvO9XIo9FWqVo7dTHDPBwzy25JGEHAWFsnpmRYErB50tgi/6euo3ir5e/kF9LUTA==", + "version": "24.1.0", + "resolved": "https://registry.npmjs.org/expect/-/expect-24.1.0.tgz", + "integrity": "sha512-lVcAPhaYkQcIyMS+F8RVwzbm1jro20IG8OkvxQ6f1JfqhVZyyudCwYogQ7wnktlf14iF3ii7ArIUO/mqvrW9Gw==", "dev": true, "requires": { "ansi-styles": "^3.2.0", @@ -2833,6 +2810,12 @@ "ms": "2.0.0" } }, + "ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=", + "dev": true + }, "path-to-regexp": { "version": "0.1.7", "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz", @@ -3071,6 +3054,12 @@ "requires": { "ms": "2.0.0" } + }, + "ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=", + "dev": true } } }, @@ -3124,21 +3113,21 @@ "dev": true }, "flush-write-stream": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/flush-write-stream/-/flush-write-stream-1.0.3.tgz", - "integrity": "sha512-calZMC10u0FMUqoiunI2AiGIIUtUIvifNwkHhNupZH4cbNnW1Itkoh/Nf5HFYmDrwWPjrUxpkZT0KhuCq0jmGw==", + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/flush-write-stream/-/flush-write-stream-1.1.1.tgz", + "integrity": 
"sha512-3Z4XhFZ3992uIq0XOqb9AreonueSYphE6oYbpt5+3u06JWklbsPkNv3ZKkP9Bz/r+1MWCaMoSQ28P85+1Yc77w==", "dev": true, "requires": { - "inherits": "^2.0.1", - "readable-stream": "^2.0.4" + "inherits": "^2.0.3", + "readable-stream": "^2.3.6" } }, "follow-redirects": { - "version": "1.6.1", - "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.6.1.tgz", - "integrity": "sha512-t2JCjbzxQpWvbhts3l6SH1DKzSrx8a+SsaVf4h6bG4kOXUuPYS/kg2Lr4gQSb7eemaHqJkOThF1BGyjlUkO1GQ==", + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.7.0.tgz", + "integrity": "sha512-m/pZQy4Gj287eNy94nivy5wchN3Kp+Q5WgUPNy5lJSZ3sgkVKSYV/ZChMAQVIgx1SqfZ2zBZtPA2YlXIWxxJOQ==", "requires": { - "debug": "=3.1.0" + "debug": "^3.2.6" } }, "for-in": { @@ -3929,12 +3918,6 @@ "resolve-dir": "^1.0.0" } }, - "global-modules-path": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/global-modules-path/-/global-modules-path-2.3.1.tgz", - "integrity": "sha512-y+shkf4InI7mPRHSo2b/k6ix6+NLDtyccYv86whhxrSGX9wjPX1VMITmrDbE1eh7zkzhiWtW2sHklJYoQ62Cxg==", - "dev": true - }, "global-prefix": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/global-prefix/-/global-prefix-1.0.2.tgz", @@ -3949,30 +3932,30 @@ } }, "globals": { - "version": "11.10.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-11.10.0.tgz", - "integrity": "sha512-0GZF1RiPKU97IHUO5TORo9w1PwrH/NBPl+fS7oMLdaTRiYmYbwK4NWoZWrAdd0/abG9R2BU+OiwyQpTpE6pdfQ==", + "version": "11.11.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-11.11.0.tgz", + "integrity": "sha512-WHq43gS+6ufNOEqlrDBxVEbb8ntfXrfAUU2ZOpCxrBdGKW3gyv8mCxAfIBD0DroPKGrJ2eSsXsLtY9MPntsyTw==", "dev": true }, "globby": { - "version": "8.0.2", - "resolved": "https://registry.npmjs.org/globby/-/globby-8.0.2.tgz", - "integrity": "sha512-yTzMmKygLp8RUpG1Ymu2VXPSJQZjNAZPD4ywgYEaG7e4tBJeUQBO8OpXrf1RCNcEs5alsoJYPAMiIHP0cmeC7w==", + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/globby/-/globby-9.0.0.tgz", + "integrity": "sha512-q0qiO/p1w/yJ0hk8V9x1UXlgsXUxlGd0AHUOXZVXBO6aznDtpx7M8D1kBrCAItoPm+4l8r6ATXV1JpjY2SBQOw==", "dev": true, "requires": { - "array-union": "^1.0.1", - "dir-glob": "2.0.0", - "fast-glob": "^2.0.2", - "glob": "^7.1.2", - "ignore": "^3.3.5", - "pify": "^3.0.0", - "slash": "^1.0.0" + "array-union": "^1.0.2", + "dir-glob": "^2.2.1", + "fast-glob": "^2.2.6", + "glob": "^7.1.3", + "ignore": "^4.0.3", + "pify": "^4.0.1", + "slash": "^2.0.0" }, "dependencies": { - "slash": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/slash/-/slash-1.0.0.tgz", - "integrity": "sha1-xB8vbDn8FtHNF61LXYlhFK5HDVU=", + "pify": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/pify/-/pify-4.0.1.tgz", + "integrity": "sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g==", "dev": true } } @@ -4006,11 +3989,6 @@ "integrity": "sha1-8QdIy+dq+WS3yWyTxrzCivEgwIE=", "dev": true }, - "gud": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/gud/-/gud-1.0.0.tgz", - "integrity": "sha512-zGEOVKFM5sVPPrYs7J5/hYEw2Pof8KCyOwyhG8sAF26mCAeUFAcYPu1mwB7hhpIP29zOIBaDqwuHdLp0jvZXjw==" - }, "handle-thing": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/handle-thing/-/handle-thing-2.0.0.tgz", @@ -4161,16 +4139,6 @@ "resolve-pathname": "^2.2.0", "value-equal": "^0.4.0", "warning": "^3.0.0" - }, - "dependencies": { - "warning": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/warning/-/warning-3.0.0.tgz", - 
"integrity": "sha1-MuU3fLVy3kqwR1O9+IIcAe1gW3w=", - "requires": { - "loose-envify": "^1.0.0" - } - } } }, "hjson": { @@ -4195,9 +4163,9 @@ "integrity": "sha512-rqcy4pJo55FTTLWt+bU8ukscqHeE/e9KWvsOW2b/a3afxQZhwkQdT1rPPCJ0rYXdj4vNcasY8zHTH+jF/qStxw==" }, "homedir-polyfill": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/homedir-polyfill/-/homedir-polyfill-1.0.1.tgz", - "integrity": "sha1-TCu8inWJmP7r9e1oWA921GdotLw=", + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/homedir-polyfill/-/homedir-polyfill-1.0.3.tgz", + "integrity": "sha512-eSmmWE5bZTK2Nou4g0AI3zZ9rswp7GRKoKXS1BLUkvPviOqs4YTN1djQIqrXy9k5gEtdLPy86JjRwsNM9tnDcA==", "dev": true, "requires": { "parse-passwd": "^1.0.0" @@ -4272,15 +4240,15 @@ } }, "http-proxy-middleware": { - "version": "0.18.0", - "resolved": "https://registry.npmjs.org/http-proxy-middleware/-/http-proxy-middleware-0.18.0.tgz", - "integrity": "sha512-Fs25KVMPAIIcgjMZkVHJoKg9VcXcC1C8yb9JUgeDvVXY0S/zgVIhMb+qVswDIgtJe2DfckMSY2d6TuTEutlk6Q==", + "version": "0.19.1", + "resolved": "https://registry.npmjs.org/http-proxy-middleware/-/http-proxy-middleware-0.19.1.tgz", + "integrity": "sha512-yHYTgWMQO8VvwNS22eLLloAkvungsKdKTLO8AJlftYIKNfJr3GK3zK0ZCfzDDGUBttdGc8xFy1mCitvNKQtC3Q==", "dev": true, "requires": { - "http-proxy": "^1.16.2", + "http-proxy": "^1.17.0", "is-glob": "^4.0.0", - "lodash": "^4.17.5", - "micromatch": "^3.1.9" + "lodash": "^4.17.11", + "micromatch": "^3.1.10" } }, "http-signature": { @@ -4345,9 +4313,9 @@ "dev": true }, "ignore": { - "version": "3.3.10", - "resolved": "https://registry.npmjs.org/ignore/-/ignore-3.3.10.tgz", - "integrity": "sha512-Pgs951kaMm5GXP7MOvxERINe3gsaVjUWFm+UZPSq9xYriQAksyhg0csnS0KXSNRD5NmNdapXEpjxG49+AKh/ug==", + "version": "4.0.6", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-4.0.6.tgz", + "integrity": "sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg==", "dev": true }, "ignore-styles": { @@ -4440,13 +4408,21 @@ "dev": true }, "internal-ip": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/internal-ip/-/internal-ip-3.0.1.tgz", - "integrity": "sha512-NXXgESC2nNVtU+pqmC9e6R8B1GpKxzsAQhffvh5AL79qKnodd+L7tnEQmTiUAVngqLalPbSqRA7XGIEL5nCd0Q==", + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/internal-ip/-/internal-ip-4.2.0.tgz", + "integrity": "sha512-ZY8Rk+hlvFeuMmG5uH1MXhhdeMntmIaxaInvAmzMq/SHV8rv4Kh+6GiQNNDQd0wZFrcO+FiTBo8lui/osKOyJw==", "dev": true, "requires": { - "default-gateway": "^2.6.0", - "ipaddr.js": "^1.5.2" + "default-gateway": "^4.0.1", + "ipaddr.js": "^1.9.0" + }, + "dependencies": { + "ipaddr.js": { + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.0.tgz", + "integrity": "sha512-M4Sjn6N/+O6/IXSJseKqHoFc+5FdGJ22sXqnjTpdZweHK64MzEPAyQZyEU3R/KRv2GLoa7nNtg/C2Ev6m7z+eA==", + "dev": true + } } }, "interpret": { @@ -4527,15 +4503,6 @@ "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-1.1.6.tgz", "integrity": "sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w==" }, - "is-builtin-module": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-builtin-module/-/is-builtin-module-1.0.0.tgz", - "integrity": "sha1-VAVy0096wxGfj3bDDLwbHgN6/74=", - "dev": true, - "requires": { - "builtin-modules": "^1.0.0" - } - }, "is-callable": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.1.4.tgz", @@ -4773,9 +4740,9 @@ "dev": true }, "istanbul-api": { - "version": "2.1.0", - 
"resolved": "https://registry.npmjs.org/istanbul-api/-/istanbul-api-2.1.0.tgz", - "integrity": "sha512-+Ygg4t1StoiNlBGc6x0f8q/Bv26FbZqP/+jegzfNpU7Q8o+4ZRoJxJPhBkgE/UonpAjtxnE4zCZIyJX+MwLRMQ==", + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/istanbul-api/-/istanbul-api-2.1.1.tgz", + "integrity": "sha512-kVmYrehiwyeBAk/wE71tW6emzLiHGjYIiDrc8sfyty4F8M02/lrgXSm+R1kXysmF20zArvmZXjlE/mg24TVPJw==", "dev": true, "requires": { "async": "^2.6.1", @@ -4786,7 +4753,7 @@ "istanbul-lib-instrument": "^3.1.0", "istanbul-lib-report": "^2.0.4", "istanbul-lib-source-maps": "^3.0.2", - "istanbul-reports": "^2.1.0", + "istanbul-reports": "^2.1.1", "js-yaml": "^3.12.0", "make-dir": "^1.3.0", "minimatch": "^3.0.4", @@ -4855,38 +4822,32 @@ "requires": { "ms": "^2.1.1" } - }, - "ms": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.1.tgz", - "integrity": "sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg==", - "dev": true } } }, "istanbul-reports": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-2.1.0.tgz", - "integrity": "sha512-azQdSX+dtTtkQEfqq20ICxWi6eOHXyHIgMFw1VOOVi8iIPWeCWRgCyFh/CsBKIhcgskMI8ExXmU7rjXTRCIJ+A==", + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-2.1.1.tgz", + "integrity": "sha512-FzNahnidyEPBCI0HcufJoSEoKykesRlFcSzQqjH9x0+LC8tnnE/p/90PBLu8iZTxr8yYZNyTtiAujUqyN+CIxw==", "dev": true, "requires": { - "handlebars": "^4.0.11" + "handlebars": "^4.1.0" } }, "jest": { - "version": "24.0.0", - "resolved": "https://registry.npmjs.org/jest/-/jest-24.0.0.tgz", - "integrity": "sha512-1Z2EblP4BnERbWZGtipGb9zjHDq7nCHgCY7V57F5SYaFRJV4DE1HKoOz+CRC5OrAThN9OVhRlUhTzsTFArg2iQ==", + "version": "24.1.0", + "resolved": "https://registry.npmjs.org/jest/-/jest-24.1.0.tgz", + "integrity": "sha512-+q91L65kypqklvlRFfXfdzUKyngQLOcwGhXQaLmVHv+d09LkNXuBuGxlofTFW42XMzu3giIcChchTsCNUjQ78A==", "dev": true, "requires": { "import-local": "^2.0.0", - "jest-cli": "^24.0.0" + "jest-cli": "^24.1.0" }, "dependencies": { "jest-cli": { - "version": "24.0.0", - "resolved": "https://registry.npmjs.org/jest-cli/-/jest-cli-24.0.0.tgz", - "integrity": "sha512-mElnFipLaGxo1SiQ1CLvuaz3eX07MJc4HcyKrApSJf8xSdY1/EwaHurKwu1g2cDiwIgY8uHj7UcF5OYbtiBOWg==", + "version": "24.1.0", + "resolved": "https://registry.npmjs.org/jest-cli/-/jest-cli-24.1.0.tgz", + "integrity": "sha512-U/iyWPwOI0T1CIxVLtk/2uviOTJ/OiSWJSe8qt6X1VkbbgP+nrtLJlmT9lPBe4lK78VNFJtrJ7pttcNv/s7yCw==", "dev": true, "requires": { "ansi-escapes": "^3.0.0", @@ -4901,16 +4862,16 @@ "istanbul-lib-instrument": "^3.0.1", "istanbul-lib-source-maps": "^3.0.1", "jest-changed-files": "^24.0.0", - "jest-config": "^24.0.0", + "jest-config": "^24.1.0", "jest-environment-jsdom": "^24.0.0", "jest-get-type": "^24.0.0", "jest-haste-map": "^24.0.0", "jest-message-util": "^24.0.0", "jest-regex-util": "^24.0.0", - "jest-resolve-dependencies": "^24.0.0", - "jest-runner": "^24.0.0", - "jest-runtime": "^24.0.0", - "jest-snapshot": "^24.0.0", + "jest-resolve-dependencies": "^24.1.0", + "jest-runner": "^24.1.0", + "jest-runtime": "^24.1.0", + "jest-snapshot": "^24.1.0", "jest-util": "^24.0.0", "jest-validate": "^24.0.0", "jest-watcher": "^24.0.0", @@ -4942,27 +4903,26 @@ } }, "jest-config": { - "version": "24.0.0", - "resolved": "https://registry.npmjs.org/jest-config/-/jest-config-24.0.0.tgz", - "integrity": "sha512-9/soqWL5YSq1ZJtgVJ5YYPCL1f9Mi2lVCp5+OXuYBOaN8DHSFRCSWip0rQ6N+mPTOEIAlCvcUH8zaPOwK4hePg==", + "version": 
"24.1.0", + "resolved": "https://registry.npmjs.org/jest-config/-/jest-config-24.1.0.tgz", + "integrity": "sha512-FbbRzRqtFC6eGjG5VwsbW4E5dW3zqJKLWYiZWhB0/4E5fgsMw8GODLbGSrY5t17kKOtCWb/Z7nsIThRoDpuVyg==", "dev": true, "requires": { "@babel/core": "^7.1.0", - "babel-jest": "^24.0.0", + "babel-jest": "^24.1.0", "chalk": "^2.0.1", "glob": "^7.1.1", "jest-environment-jsdom": "^24.0.0", "jest-environment-node": "^24.0.0", "jest-get-type": "^24.0.0", - "jest-jasmine2": "^24.0.0", + "jest-jasmine2": "^24.1.0", "jest-regex-util": "^24.0.0", - "jest-resolve": "^24.0.0", + "jest-resolve": "^24.1.0", "jest-util": "^24.0.0", "jest-validate": "^24.0.0", "micromatch": "^3.1.10", "pretty-format": "^24.0.0", - "realpath-native": "^1.0.2", - "uuid": "^3.3.2" + "realpath-native": "^1.0.2" } }, "jest-diff": { @@ -5042,22 +5002,23 @@ } }, "jest-jasmine2": { - "version": "24.0.0", - "resolved": "https://registry.npmjs.org/jest-jasmine2/-/jest-jasmine2-24.0.0.tgz", - "integrity": "sha512-q1xEV9KHM0bgfBj3yrkrjRF5kxpNDkWPCwVfSPN1DC+pD6J5wrM9/u2BgzhKhALXiaZUUhJ+f/OcEC0Gwpw90A==", + "version": "24.1.0", + "resolved": "https://registry.npmjs.org/jest-jasmine2/-/jest-jasmine2-24.1.0.tgz", + "integrity": "sha512-H+o76SdSNyCh9fM5K8upK45YTo/DiFx5w2YAzblQebSQmukDcoVBVeXynyr7DDnxh+0NTHYRCLwJVf3tC518wg==", "dev": true, "requires": { "@babel/traverse": "^7.1.0", "chalk": "^2.0.1", "co": "^4.6.0", - "expect": "^24.0.0", + "expect": "^24.1.0", "is-generator-fn": "^2.0.0", "jest-each": "^24.0.0", "jest-matcher-utils": "^24.0.0", "jest-message-util": "^24.0.0", - "jest-snapshot": "^24.0.0", + "jest-snapshot": "^24.1.0", "jest-util": "^24.0.0", - "pretty-format": "^24.0.0" + "pretty-format": "^24.0.0", + "throat": "^4.0.0" } }, "jest-leak-detector": { @@ -5107,9 +5068,9 @@ "dev": true }, "jest-resolve": { - "version": "24.0.0", - "resolved": "https://registry.npmjs.org/jest-resolve/-/jest-resolve-24.0.0.tgz", - "integrity": "sha512-uKDGyJqNaBQKox1DJzm27CJobADsIMNgZGusXhtYzl98LKu/fKuokkRsd7EBVgoDA80HKHc3LOPKuYLryMu1vw==", + "version": "24.1.0", + "resolved": "https://registry.npmjs.org/jest-resolve/-/jest-resolve-24.1.0.tgz", + "integrity": "sha512-TPiAIVp3TG6zAxH28u/6eogbwrvZjBMWroSLBDkwkHKrqxB/RIdwkWDye4uqPlZIXWIaHtifY3L0/eO5Z0f2wg==", "dev": true, "requires": { "browser-resolve": "^1.11.3", @@ -5118,30 +5079,31 @@ } }, "jest-resolve-dependencies": { - "version": "24.0.0", - "resolved": "https://registry.npmjs.org/jest-resolve-dependencies/-/jest-resolve-dependencies-24.0.0.tgz", - "integrity": "sha512-CJGS5ME2g5wL16o3Y22ga9p5ntNT5CUYX40/0lYj9ic9jB5YHm/qMKTgbFt9kowEBiMOFpXy15dWtBTEU54+zg==", + "version": "24.1.0", + "resolved": "https://registry.npmjs.org/jest-resolve-dependencies/-/jest-resolve-dependencies-24.1.0.tgz", + "integrity": "sha512-2VwPsjd3kRPu7qe2cpytAgowCObk5AKeizfXuuiwgm1a9sijJDZe8Kh1sFj6FKvSaNEfCPlBVkZEJa2482m/Uw==", "dev": true, "requires": { "jest-regex-util": "^24.0.0", - "jest-snapshot": "^24.0.0" + "jest-snapshot": "^24.1.0" } }, "jest-runner": { - "version": "24.0.0", - "resolved": "https://registry.npmjs.org/jest-runner/-/jest-runner-24.0.0.tgz", - "integrity": "sha512-XefXm2XimKtwdfi2am4364GfCmLD1tOjiRtDexY65diCXt4Rw23rxj2wiW7p9s8Nh9dzJQNmrheqZ5rzvn762g==", + "version": "24.1.0", + "resolved": "https://registry.npmjs.org/jest-runner/-/jest-runner-24.1.0.tgz", + "integrity": "sha512-CDGOkT3AIFl16BLL/OdbtYgYvbAprwJ+ExKuLZmGSCSldwsuU2dEGauqkpvd9nphVdAnJUcP12e/EIlnTX0QXg==", "dev": true, "requires": { + "chalk": "^2.4.2", "exit": "^0.1.2", "graceful-fs": "^4.1.15", - "jest-config": "^24.0.0", + 
"jest-config": "^24.1.0", "jest-docblock": "^24.0.0", "jest-haste-map": "^24.0.0", - "jest-jasmine2": "^24.0.0", + "jest-jasmine2": "^24.1.0", "jest-leak-detector": "^24.0.0", "jest-message-util": "^24.0.0", - "jest-runtime": "^24.0.0", + "jest-runtime": "^24.1.0", "jest-util": "^24.0.0", "jest-worker": "^24.0.0", "source-map-support": "^0.5.6", @@ -5149,9 +5111,9 @@ } }, "jest-runtime": { - "version": "24.0.0", - "resolved": "https://registry.npmjs.org/jest-runtime/-/jest-runtime-24.0.0.tgz", - "integrity": "sha512-UeVoTGiij8upcqfyBlJvImws7IGY+ZWtgVpt1h4VmVbyei39tVGia/20VoP3yvodS6FdjTwBj+JzVNuoh/9UTw==", + "version": "24.1.0", + "resolved": "https://registry.npmjs.org/jest-runtime/-/jest-runtime-24.1.0.tgz", + "integrity": "sha512-59/BY6OCuTXxGeDhEMU7+N33dpMQyXq7MLK07cNSIY/QYt2QZgJ7Tjx+rykBI0skAoigFl0A5tmT8UdwX92YuQ==", "dev": true, "requires": { "@babel/core": "^7.1.0", @@ -5162,19 +5124,19 @@ "fast-json-stable-stringify": "^2.0.0", "glob": "^7.1.3", "graceful-fs": "^4.1.15", - "jest-config": "^24.0.0", + "jest-config": "^24.1.0", "jest-haste-map": "^24.0.0", "jest-message-util": "^24.0.0", "jest-regex-util": "^24.0.0", - "jest-resolve": "^24.0.0", - "jest-snapshot": "^24.0.0", + "jest-resolve": "^24.1.0", + "jest-snapshot": "^24.1.0", "jest-util": "^24.0.0", "jest-validate": "^24.0.0", "micromatch": "^3.1.10", "realpath-native": "^1.0.0", "slash": "^2.0.0", - "strip-bom": "3.0.0", - "write-file-atomic": "^2.4.2", + "strip-bom": "^3.0.0", + "write-file-atomic": "2.4.1", "yargs": "^12.0.2" } }, @@ -5185,9 +5147,9 @@ "dev": true }, "jest-snapshot": { - "version": "24.0.0", - "resolved": "https://registry.npmjs.org/jest-snapshot/-/jest-snapshot-24.0.0.tgz", - "integrity": "sha512-7OcrckVnfzVYxSGPYl2Sn+HyT30VpDv+FMBFbQxSQ6DV2K9Js6vYT6d4SBPKp6DfDiEL2txNssJBxtlvF+Dymw==", + "version": "24.1.0", + "resolved": "https://registry.npmjs.org/jest-snapshot/-/jest-snapshot-24.1.0.tgz", + "integrity": "sha512-th6TDfFqEmXvuViacU1ikD7xFb7lQsPn2rJl7OEmnfIVpnrx3QNY2t3PE88meeg0u/mQ0nkyvmC05PBqO4USFA==", "dev": true, "requires": { "@babel/types": "^7.0.0", @@ -5195,7 +5157,7 @@ "jest-diff": "^24.0.0", "jest-matcher-utils": "^24.0.0", "jest-message-util": "^24.0.0", - "jest-resolve": "^24.0.0", + "jest-resolve": "^24.1.0", "mkdirp": "^0.5.1", "natural-compare": "^1.4.0", "pretty-format": "^24.0.0", @@ -5393,9 +5355,9 @@ "dev": true }, "kleur": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/kleur/-/kleur-3.0.1.tgz", - "integrity": "sha512-P3kRv+B+Ra070ng2VKQqW4qW7gd/v3iD8sy/zOdcYRsfiD+QBokQNOps/AfP6Hr48cBhIIBFWckB9aO+IZhrWg==", + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/kleur/-/kleur-3.0.2.tgz", + "integrity": "sha512-3h7B2WRT5LNXOtQiAaWonilegHcPSf9nLVXlSTci8lu1dZUuui61+EsPEZqSVxY7rXYmB2DVKMQILxaO5WL61Q==", "dev": true }, "lcid": { @@ -5429,12 +5391,6 @@ "type-check": "~0.3.2" } }, - "lightercollective": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/lightercollective/-/lightercollective-0.1.0.tgz", - "integrity": "sha512-J9tg5uraYoQKaWbmrzDDexbG6hHnMcWS1qLYgJSWE+mpA3U5OCSeMUhb+K55otgZJ34oFdR0ECvdIb3xuO5JOQ==", - "dev": true - }, "load-json-file": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-4.0.0.tgz", @@ -5503,6 +5459,16 @@ "resolved": "https://registry.npmjs.org/lodash.debounce/-/lodash.debounce-4.0.8.tgz", "integrity": "sha1-gteb/zCmfEAF/9XiUVMArZyk168=" }, + "lodash.get": { + "version": "4.4.2", + "resolved": "https://registry.npmjs.org/lodash.get/-/lodash.get-4.4.2.tgz", + "integrity": 
"sha1-LRd/ZS+jHpObRDjVNBSZ36OCXpk=" + }, + "lodash.isequal": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/lodash.isequal/-/lodash.isequal-4.5.0.tgz", + "integrity": "sha1-QVxEePK8wwEgwizhDtMib30+GOA=" + }, "lodash.mergewith": { "version": "4.6.1", "resolved": "https://registry.npmjs.org/lodash.mergewith/-/lodash.mergewith-4.6.1.tgz", @@ -5607,6 +5573,12 @@ "tmpl": "1.0.x" } }, + "mamacro": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/mamacro/-/mamacro-0.0.3.tgz", + "integrity": "sha512-qMEwh+UujcQ+kbz3T6V+wAmO2U8veoq2w+3wY8MquqwVA3jChfwY+Tk52GZKDfACEPjuZ7r2oJLejwpt8jtwTA==", + "dev": true + }, "map-age-cleaner": { "version": "0.1.3", "resolved": "https://registry.npmjs.org/map-age-cleaner/-/map-age-cleaner-0.1.3.tgz", @@ -5655,14 +5627,14 @@ "dev": true }, "mem": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/mem/-/mem-4.0.0.tgz", - "integrity": "sha512-WQxG/5xYc3tMbYLXoXPm81ET2WDULiU5FxbuIoNbJqLOOI8zehXFdZuiUEgfdrU2mVB1pxBZUGlYORSrpuJreA==", + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/mem/-/mem-4.1.0.tgz", + "integrity": "sha512-I5u6Q1x7wxO0kdOpYBB28xueHADYps5uty/zg936CiG8NTe5sJL8EjrCuLneuDW3PlMdZBGDIn8BirEVdovZvg==", "dev": true, "requires": { "map-age-cleaner": "^0.1.1", "mimic-fn": "^1.0.0", - "p-is-promise": "^1.1.0" + "p-is-promise": "^2.0.0" } }, "memory-fs": { @@ -5854,18 +5826,18 @@ "dev": true }, "mime-db": { - "version": "1.37.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.37.0.tgz", - "integrity": "sha512-R3C4db6bgQhlIhPU48fUtdVmKnflq+hRdad7IyKhtFj06VPNVdk2RhiYL3UjQIlso8L+YxAtFkobT0VK+S/ybg==", + "version": "1.38.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.38.0.tgz", + "integrity": "sha512-bqVioMFFzc2awcdJZIzR3HjZFX20QhilVS7hytkKrv7xFAn8bM1gzc/FOX2awLISvWe0PV8ptFKcon+wZ5qYkg==", "dev": true }, "mime-types": { - "version": "2.1.21", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.21.tgz", - "integrity": "sha512-3iL6DbwpyLzjR3xHSFNFeb9Nz/M8WDkX33t1GFQnFOllWk8pOrh/LSrB5OXlnlW5P9LH73X6loW/eogc+F5lJg==", + "version": "2.1.22", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.22.tgz", + "integrity": "sha512-aGl6TZGnhm/li6F7yx82bJiBZwgiEa4Hf6CNr8YO+r5UHr53tSTYZb102zyU50DOWWKeOv0uQLRL0/9EiKWCog==", "dev": true, "requires": { - "mime-db": "~1.37.0" + "mime-db": "~1.38.0" } }, "mimic-fn": { @@ -6000,6 +5972,15 @@ "integrity": "sha512-VlfT9F3V0v+jr4yxPc5gg9s62/fIVWsd2Bk2iD435um1NlGMYdVCq+MjcXnhYq2icNOizHr1kK+5TI6H0Hy0ag==", "dev": true }, + "debug": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.1.0.tgz", + "integrity": "sha512-OX8XqP7/1a9cqkxYw2yXss15f26NKWBpDXQd0/uK/KPqdQhxbPa994hnzjcE2VqQpDslf55723cKPUOGSmMY3g==", + "dev": true, + "requires": { + "ms": "2.0.0" + } + }, "glob": { "version": "7.1.2", "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.2.tgz", @@ -6014,6 +5995,12 @@ "path-is-absolute": "^1.0.0" } }, + "ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=", + "dev": true + }, "supports-color": { "version": "5.4.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.4.0.tgz", @@ -6040,9 +6027,9 @@ } }, "ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.1.tgz", + "integrity": 
"sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg==" }, "multicast-dns": { "version": "6.2.3", @@ -6204,21 +6191,22 @@ "dev": true }, "node-notifier": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/node-notifier/-/node-notifier-5.3.0.tgz", - "integrity": "sha512-AhENzCSGZnZJgBARsUjnQ7DnZbzyP+HxlVXuD0xqAnvL8q+OqtSX7lGg9e8nHzwXkMMXNdVeqq4E2M3EUAqX6Q==", + "version": "5.4.0", + "resolved": "https://registry.npmjs.org/node-notifier/-/node-notifier-5.4.0.tgz", + "integrity": "sha512-SUDEb+o71XR5lXSTyivXd9J7fCloE3SyP4lSgt3lU2oSANiox+SxlNRGPjDKrwU1YN3ix2KN/VGGCg0t01rttQ==", "dev": true, "requires": { "growly": "^1.3.0", + "is-wsl": "^1.1.0", "semver": "^5.5.0", "shellwords": "^0.1.1", "which": "^1.3.0" } }, "node-releases": { - "version": "1.1.5", - "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-1.1.5.tgz", - "integrity": "sha512-6C2K0x1QlYTz9wCueMN/DVZFcBVg/qsj2k9iV5gV/+OvG4KNrl7Nu7TWbWFQ3/Z2V10qVFQWtj5Xa+VBodcI6g==", + "version": "1.1.8", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-1.1.8.tgz", + "integrity": "sha512-gQm+K9mGCiT/NXHy+V/ZZS1N/LOaGGqRAAJJs3X9Ah1g+CIbRcBgNyoNYQ+SEtcyAtB9KqDruu+fF7nWjsqRaA==", "dev": true, "requires": { "semver": "^5.3.0" @@ -6329,13 +6317,13 @@ } }, "normalize-package-data": { - "version": "2.4.0", - "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.4.0.tgz", - "integrity": "sha512-9jjUFbTPfEy3R/ad/2oNbKtW9Hgovl5O1FvFWKkKblNXoN/Oou6+9+KKohPK13Yc3/TyunyWhJp6gvRNR/PPAw==", + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.5.0.tgz", + "integrity": "sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA==", "dev": true, "requires": { "hosted-git-info": "^2.1.4", - "is-builtin-module": "^1.0.0", + "resolve": "^1.10.0", "semver": "2 || 3 || 4 || 5", "validate-npm-package-license": "^3.0.1" } @@ -6356,9 +6344,9 @@ "dev": true }, "normalize.css": { - "version": "8.0.1", - "resolved": "https://registry.npmjs.org/normalize.css/-/normalize.css-8.0.1.tgz", - "integrity": "sha512-qizSNPO93t1YUuUhP22btGOo3chcvDFqFaj2TRybP0DMxkHOCTYwp3n34fel4a31ORXy4m1Xq0Gyqpb5m33qIg==" + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/normalize.css/-/normalize.css-4.1.1.tgz", + "integrity": "sha1-TwsdWiNTgyUrBNhWa4Zsxfytnww=" }, "npm-run-path": { "version": "2.0.2", @@ -6399,9 +6387,9 @@ "integrity": "sha1-StCAk21EPCVhrtnyGX7//iX05QY=" }, "nwsapi": { - "version": "2.0.9", - "resolved": "https://registry.npmjs.org/nwsapi/-/nwsapi-2.0.9.tgz", - "integrity": "sha512-nlWFSCTYQcHk/6A9FFnfhKc14c3aFhfdNBXgo8Qgi9QTBu/qg3Ww+Uiz9wMzXd1T8GFxPc2QIHB6Qtf2XFryFQ==", + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/nwsapi/-/nwsapi-2.1.1.tgz", + "integrity": "sha512-T5GaA1J/d34AC8mkrFD2O0DR17kwJ702ZOtJOsS8RpbsQZVOC2/xYFb1i/cw+xdM54JIlMuojjDOYct8GIWtwg==", "dev": true }, "oauth-sign": { @@ -6447,9 +6435,9 @@ } }, "object-keys": { - "version": "1.0.12", - "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.0.12.tgz", - "integrity": "sha512-FTMyFUm2wBcGHnH2eXmz7tC6IwlqQZ6mVZ+6dm6vZ4IQIHjs6FdNsQBuKGPuUUUY6NfJw2PshC08Tn6LzLDOag==" + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.0.tgz", + "integrity": "sha512-6OO5X1+2tYkNyNEx6TsCxEqFfRWaqx6EtMiSbGrw8Ob8v9Ne+Hl8rBAgLBZn5wjEz3s/s6U1WXFUFOcxxAwUpg==" }, "object-visit": { "version": "1.0.1", @@ -6516,9 +6504,9 @@ } }, 
"on-headers": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/on-headers/-/on-headers-1.0.1.tgz", - "integrity": "sha1-ko9dD0cNSTQmUepnlLCFfBAGk/c=", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/on-headers/-/on-headers-1.0.2.tgz", + "integrity": "sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA==", "dev": true }, "once": { @@ -6649,9 +6637,9 @@ "dev": true }, "p-is-promise": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/p-is-promise/-/p-is-promise-1.1.0.tgz", - "integrity": "sha1-nJRWmJ6fZYgBewQ01WCXZ1w9oF4=", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/p-is-promise/-/p-is-promise-2.0.0.tgz", + "integrity": "sha512-pzQPhYMCAgLAKPWD2jC3Se9fEfrD9npNos0y150EeqZll7akhEgGhTW/slB6lHku8AvYGiJ+YJ5hfHKePPgFWg==", "dev": true }, "p-limit": { @@ -6708,9 +6696,9 @@ } }, "parse-asn1": { - "version": "5.1.3", - "resolved": "https://registry.npmjs.org/parse-asn1/-/parse-asn1-5.1.3.tgz", - "integrity": "sha512-VrPoetlz7B/FqjBLD2f5wBVZvsZVLnRUrxVLfRYhGXCODa/NWE4p3Wp+6+aV3ZPL3KM7/OZmxDIwwijD7yuucg==", + "version": "5.1.4", + "resolved": "https://registry.npmjs.org/parse-asn1/-/parse-asn1-5.1.4.tgz", + "integrity": "sha512-Qs5duJcuvNExRfFZ99HDD3z4mAi3r9Wl/FOjEOijlxwCZs7E7mW2vjTpgQ4J8LpTF8x5v+1Vn5UQFejmWT11aw==", "dev": true, "requires": { "asn1.js": "^4.0.0", @@ -6855,9 +6843,9 @@ } }, "pirates": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.0.tgz", - "integrity": "sha512-8t5BsXy1LUIjn3WWOlOuFDuKswhQb/tkak641lvBgmPOBUQHXveORtlMCp6OdPV1dtuTaEahKA8VNz6uLfKBtA==", + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.1.tgz", + "integrity": "sha512-WuNqLTbMI3tmfef2TKxlQmAiLHKtFhlsCZnPIpuv2Ow0RDVO8lfy1Opf4NUzlMXLjPl+Men7AuVdX6TA+s+uGA==", "dev": true, "requires": { "node-modules-regexp": "^1.0.0" @@ -6878,11 +6866,6 @@ "integrity": "sha512-2qHaIQr2VLRFoxe2nASzsV6ef4yOOH+Fi9FBOVH6cqeSgUnoyySPZkxzLuzd+RYOQTRpROA0ztTMqxROKSb/nA==", "dev": true }, - "popper.js": { - "version": "1.14.6", - "resolved": "https://registry.npmjs.org/popper.js/-/popper.js-1.14.6.tgz", - "integrity": "sha512-AGwHGQBKumlk/MDfrSOf0JHhJCImdDMcGNoqKmKkU+68GFazv3CQ6q9r7Ja1sKDZmYWTckY/uLyEznheTDycnA==" - }, "portfinder": { "version": "1.0.20", "resolved": "https://registry.npmjs.org/portfinder/-/portfinder-1.0.20.tgz", @@ -6908,6 +6891,12 @@ "requires": { "ms": "2.0.0" } + }, + "ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=", + "dev": true } } }, @@ -6939,9 +6928,9 @@ } }, "postcss-cli": { - "version": "6.1.1", - "resolved": "https://registry.npmjs.org/postcss-cli/-/postcss-cli-6.1.1.tgz", - "integrity": "sha512-18PQO4qCDWY6vggnG3k+i5zrUnRc4I6P4MpKQWGbNyTfWBaRgu/nScunw6VH5QnUKtRu0NuPF5SpxhcYzWVXDg==", + "version": "6.1.2", + "resolved": "https://registry.npmjs.org/postcss-cli/-/postcss-cli-6.1.2.tgz", + "integrity": "sha512-jIWfIkqt8cTThSpH8DBaNxHlBf99OKSem2RseRpfVPqWayxHKQB0IWdS/IF5XSGeFU5QslSDTdVHnw6qggXGkA==", "dev": true, "requires": { "chalk": "^2.1.0", @@ -6949,7 +6938,7 @@ "dependency-graph": "^0.8.0", "fs-extra": "^7.0.0", "get-stdin": "^6.0.0", - "globby": "^8.0.0", + "globby": "^9.0.0", "postcss": "^7.0.0", "postcss-load-config": "^2.0.0", "postcss-reporter": "^6.0.0", @@ -7194,9 +7183,9 @@ } }, "postcss-modules-local-by-default": { - "version": "2.0.4", - "resolved": 
"https://registry.npmjs.org/postcss-modules-local-by-default/-/postcss-modules-local-by-default-2.0.4.tgz", - "integrity": "sha512-WvuSaTKXUqYJbnT7R3YrsNrHv/C5vRfr5VglS4bFOk0MYT4CLBfc/xgExA+x2RftlYgiBDvWmVs191Xv8S8gZQ==", + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/postcss-modules-local-by-default/-/postcss-modules-local-by-default-2.0.5.tgz", + "integrity": "sha512-iFgxlCAVLno5wIJq+4hyuOmc4VjZEZxzpdeuZcBytLNWEK5Bx2oRF9PPcAz5TALbaFvrZm8sJYtJ3hV+tMSEIg==", "dev": true, "requires": { "css-selector-tokenizer": "^0.7.0", @@ -7442,22 +7431,23 @@ "dev": true }, "prompts": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/prompts/-/prompts-2.0.1.tgz", - "integrity": "sha512-8lnEOSIGQbgbnO47+13S+H204L8ISogGulyi0/NNEFAQ9D1VMNTrJ9SBX2Ra03V4iPn/zt36HQMndRYkaPoWiQ==", + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/prompts/-/prompts-2.0.3.tgz", + "integrity": "sha512-H8oWEoRZpybm6NV4to9/1limhttEo13xK62pNvn2JzY0MA03p7s0OjtmhXyon3uJmxiJJVSuUwEJFFssI3eBiQ==", "dev": true, "requires": { - "kleur": "^3.0.0", + "kleur": "^3.0.2", "sisteransi": "^1.0.0" } }, "prop-types": { - "version": "15.6.2", - "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.6.2.tgz", - "integrity": "sha512-3pboPvLiWD7dkI3qf3KbUe6hKFKa52w+AE0VCqECtf+QHAKgOL37tTaNCnuX1nAAQ4ZhyP+kYVKf8rLmJ/feDQ==", + "version": "15.7.2", + "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.7.2.tgz", + "integrity": "sha512-8QQikdH7//R2vurIJSutZ1smHYTcLpRWEOlHnzcWHmBYrOGUysKwSsrC89BCiFj3CbrfJ/nXFdJepOVrY1GCHQ==", "requires": { - "loose-envify": "^1.3.1", - "object-assign": "^4.1.1" + "loose-envify": "^1.4.0", + "object-assign": "^4.1.1", + "react-is": "^16.8.1" } }, "proxy-addr": { @@ -7541,6 +7531,14 @@ "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==", "dev": true }, + "pure-render-decorator": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/pure-render-decorator/-/pure-render-decorator-1.1.1.tgz", + "integrity": "sha1-9eC3bOEoeOadpBp+6GJ/71S19Xo=", + "requires": { + "fbjs": "^0.8.0" + } + }, "qs": { "version": "6.5.2", "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.2.tgz", @@ -7566,9 +7564,9 @@ "dev": true }, "randombytes": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.0.6.tgz", - "integrity": "sha512-CIQ5OFxf4Jou6uOKe9t1AOgqpeU5fd70A8NPdHSGeYXqXsPe6peOwI0cUl88RWZ6sP1vPMV3avd/R6cZ5/sP1A==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz", + "integrity": "sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==", "dev": true, "requires": { "safe-buffer": "^5.1.0" @@ -7614,44 +7612,51 @@ } }, "react": { - "version": "16.7.0", - "resolved": "https://registry.npmjs.org/react/-/react-16.7.0.tgz", - "integrity": "sha512-StCz3QY8lxTb5cl2HJxjwLFOXPIFQp+p+hxQfc8WE0QiLfCtIlKj8/+5tjjKm8uSTlAW+fCPaavGFS06V9Ar3A==", + "version": "16.8.3", + "resolved": "https://registry.npmjs.org/react/-/react-16.8.3.tgz", + "integrity": "sha512-3UoSIsEq8yTJuSu0luO1QQWYbgGEILm+eJl2QN/VLDi7hL+EN18M3q3oVZwmVzzBJ3DkM7RMdRwBmZZ+b4IzSA==", "requires": { "loose-envify": "^1.1.0", "object-assign": "^4.1.1", "prop-types": "^15.6.2", - "scheduler": "^0.12.0" + "scheduler": "^0.13.3" + } + }, + "react-ace": { + "version": "6.4.0", + "resolved": "https://registry.npmjs.org/react-ace/-/react-ace-6.4.0.tgz", + "integrity": 
"sha512-woTTgGk9x4GRRWiM4QLNOspjaJAYLX3UZ3J2XRYQvJiN6wyxrFY9x7rdOKc+4Tj+khb/ccPiDj/kll4UeJEDPw==", + "requires": { + "brace": "^0.11.1", + "diff-match-patch": "^1.0.4", + "lodash.get": "^4.4.2", + "lodash.isequal": "^4.5.0", + "prop-types": "^15.6.2" + } + }, + "react-addons-css-transition-group": { + "version": "15.6.2", + "resolved": "https://registry.npmjs.org/react-addons-css-transition-group/-/react-addons-css-transition-group-15.6.2.tgz", + "integrity": "sha1-nkN2vPQLUhfRTsaFUwgc7ksIptY=", + "requires": { + "react-transition-group": "^1.2.0" } }, "react-dom": { - "version": "16.7.0", - "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-16.7.0.tgz", - "integrity": "sha512-D0Ufv1ExCAmF38P2Uh1lwpminZFRXEINJe53zRAbm4KPwSyd6DY/uDoS0Blj9jvPpn1+wivKpZYc8aAAN/nAkg==", + "version": "16.8.3", + "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-16.8.3.tgz", + "integrity": "sha512-ttMem9yJL4/lpItZAQ2NTFAbV7frotHk5DZEHXUOws2rMmrsvh1Na7ThGT0dTzUIl6pqTOi5tYREfL8AEna3lA==", "requires": { "loose-envify": "^1.1.0", "object-assign": "^4.1.1", "prop-types": "^15.6.2", - "scheduler": "^0.12.0" + "scheduler": "^0.13.3" } }, - "react-lifecycles-compat": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/react-lifecycles-compat/-/react-lifecycles-compat-3.0.4.tgz", - "integrity": "sha512-fBASbA6LnOU9dOU2eW7aQ8xmYBSXUIWr+UmF9b1efZBazGNO+rcXT/icdKnYm2pTwcRylVUYwW7H1PHfLekVzA==" - }, - "react-popper": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/react-popper/-/react-popper-1.3.2.tgz", - "integrity": "sha512-UbFWj55Yt9uqvy0oZ+vULDL2Bw1oxeZF9/JzGyxQ5ypgauRH/XlarA5+HLZWro/Zss6Ht2kqpegtb6sYL8GUGw==", - "requires": { - "@babel/runtime": "^7.1.2", - "create-react-context": "<=0.2.2", - "popper.js": "^1.14.4", - "prop-types": "^15.6.1", - "typed-styles": "^0.0.7", - "warning": "^4.0.2" - } + "react-is": { + "version": "16.8.3", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-16.8.3.tgz", + "integrity": "sha512-Y4rC1ZJmsxxkkPuMLwvKvlL1Zfpbcu+Bf4ZigkHup3v9EfdYhAlWAaVyA19olXq2o2mGn0w+dFKvk3pVVlYcIA==" }, "react-router": { "version": "4.3.1", @@ -7665,6 +7670,16 @@ "path-to-regexp": "^1.7.0", "prop-types": "^15.6.1", "warning": "^4.0.1" + }, + "dependencies": { + "warning": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/warning/-/warning-4.0.3.tgz", + "integrity": "sha512-rpJyN222KWIvHJ/F53XSZv0Zl/accqHR8et1kpaMTD/fLCRxtV8iX8czMzY7sVZupTI3zcUTg8eycS2kNF9l6w==", + "requires": { + "loose-envify": "^1.0.0" + } + } } }, "react-router-dom": { @@ -7678,25 +7693,36 @@ "prop-types": "^15.6.1", "react-router": "^4.3.1", "warning": "^4.0.1" + }, + "dependencies": { + "warning": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/warning/-/warning-4.0.3.tgz", + "integrity": "sha512-rpJyN222KWIvHJ/F53XSZv0Zl/accqHR8et1kpaMTD/fLCRxtV8iX8czMzY7sVZupTI3zcUTg8eycS2kNF9l6w==", + "requires": { + "loose-envify": "^1.0.0" + } + } } }, "react-table": { - "version": "6.8.6", - "resolved": "https://registry.npmjs.org/react-table/-/react-table-6.8.6.tgz", - "integrity": "sha1-oK2LSDkxkFLVvvwBJgP7Fh5S7eM=", + "version": "6.9.2", + "resolved": "https://registry.npmjs.org/react-table/-/react-table-6.9.2.tgz", + "integrity": "sha512-sTbNHU8Um0xRtmCd1js873HXnXaMWeBwZoiljuj0l1d44eaqjKyYPK/3HCBbJg1yeE2O5pQJ3Km0tlm9niNL9w==", "requires": { "classnames": "^2.2.5" } }, "react-transition-group": { - "version": "2.5.3", - "resolved": "https://registry.npmjs.org/react-transition-group/-/react-transition-group-2.5.3.tgz", - "integrity": 
"sha512-2DGFck6h99kLNr8pOFk+z4Soq3iISydwOFeeEVPjTN6+Y01CmvbWmnN02VuTWyFdnRtIDPe+wy2q6Ui8snBPZg==", + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/react-transition-group/-/react-transition-group-1.2.1.tgz", + "integrity": "sha512-CWaL3laCmgAFdxdKbhhps+c0HRGF4c+hdM4H23+FI1QBNUyx/AMeIJGWorehPNSaKnQNOAxL7PQmqMu78CDj3Q==", "requires": { - "dom-helpers": "^3.3.1", - "loose-envify": "^1.4.0", - "prop-types": "^15.6.2", - "react-lifecycles-compat": "^3.0.4" + "chain-function": "^1.0.0", + "dom-helpers": "^3.2.0", + "loose-envify": "^1.3.1", + "prop-types": "^15.5.6", + "warning": "^3.0.0" } }, "read-cache": { @@ -7772,9 +7798,9 @@ } }, "realpath-native": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/realpath-native/-/realpath-native-1.0.2.tgz", - "integrity": "sha512-+S3zTvVt9yTntFrBpm7TQmQ3tzpCrnA1a/y+3cUHAc9ZR6aIjG0WNLR+Rj79QpJktY+VeW/TQtFlQ1bzsehI8g==", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/realpath-native/-/realpath-native-1.1.0.tgz", + "integrity": "sha512-wlgPA6cCIIg9gKz0fgAPjnzh4yR/LnXovwuo9hvyGvx3h8nX4+/iLZplfUWasXpqD8BdnGnP5njOFjkUwPzvjA==", "dev": true, "requires": { "util.promisify": "^1.0.0" @@ -7911,23 +7937,23 @@ } }, "request-promise-core": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/request-promise-core/-/request-promise-core-1.1.1.tgz", - "integrity": "sha1-Pu4AssWqgyOc+wTFcA2jb4HNCLY=", + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/request-promise-core/-/request-promise-core-1.1.2.tgz", + "integrity": "sha512-UHYyq1MO8GsefGEt7EprS8UrXsm1TxEvFUX1IMTuSLU2Rh7fTIdFtl8xD7JiEYiWU2dl+NYAjCTksTehQUxPag==", "dev": true, "requires": { - "lodash": "^4.13.1" + "lodash": "^4.17.11" } }, "request-promise-native": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/request-promise-native/-/request-promise-native-1.0.5.tgz", - "integrity": "sha1-UoF3D2jgyXGeUWP9P6tIIhX0/aU=", + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/request-promise-native/-/request-promise-native-1.0.7.tgz", + "integrity": "sha512-rIMnbBdgNViL37nZ1b3L/VfPOpSi0TqVDQPAvO6U14lMzOLrt5nilxCQqtDKhZeDiW0/hkCXGoQjhgJd/tCh6w==", "dev": true, "requires": { - "request-promise-core": "1.1.1", - "stealthy-require": "^1.1.0", - "tough-cookie": ">=2.3.3" + "request-promise-core": "1.1.2", + "stealthy-require": "^1.1.1", + "tough-cookie": "^2.3.3" } }, "require-directory": { @@ -7954,11 +7980,6 @@ "integrity": "sha1-kl0mAdOaxIXgkc8NpcbmlNw9yv8=", "dev": true }, - "resize-observer-polyfill": { - "version": "1.5.1", - "resolved": "https://registry.npmjs.org/resize-observer-polyfill/-/resize-observer-polyfill-1.5.1.tgz", - "integrity": "sha512-LwZrotdHOo12nQuZlHEmtuXdqGoOD0OhaxopaNFxWzInpEgaLWoVuAMbTzixuosCx2nEG58ngzW3vxdWoxIgdg==" - }, "resolve": { "version": "1.10.0", "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.10.0.tgz", @@ -8323,9 +8344,9 @@ "dev": true }, "scheduler": { - "version": "0.12.0", - "resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.12.0.tgz", - "integrity": "sha512-t7MBR28Akcp4Jm+QoR63XgAi9YgCUmgvDHqf5otgAj4QvdoBE4ImCX0ffehefePPG+aitiYHp0g/mW6s4Tp+dw==", + "version": "0.13.3", + "resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.13.3.tgz", + "integrity": "sha512-UxN5QRYWtpR1egNWzJcVLk8jlegxAugswQc984lD3kU7NuobsO37/sRfbpTdBjtnD5TBNFA2Q2oLV5+UmPSmEQ==", "requires": { "loose-envify": "^1.1.0", "object-assign": "^4.1.1" @@ -8413,6 +8434,12 @@ "requires": { "ms": "2.0.0" } + }, + "ms": { + "version": "2.0.0", + "resolved": 
"https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=", + "dev": true } } }, @@ -8445,6 +8472,12 @@ "requires": { "ms": "2.0.0" } + }, + "ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=", + "dev": true } } }, @@ -8611,6 +8644,12 @@ "is-extendable": "^0.1.0" } }, + "ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=", + "dev": true + }, "source-map": { "version": "0.5.7", "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", @@ -8714,15 +8753,6 @@ "url-parse": "^1.4.3" }, "dependencies": { - "debug": { - "version": "3.2.6", - "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.6.tgz", - "integrity": "sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ==", - "dev": true, - "requires": { - "ms": "^2.1.1" - } - }, "faye-websocket": { "version": "0.11.1", "resolved": "https://registry.npmjs.org/faye-websocket/-/faye-websocket-0.11.1.tgz", @@ -8731,12 +8761,6 @@ "requires": { "websocket-driver": ">=0.5.1" } - }, - "ms": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.1.tgz", - "integrity": "sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg==", - "dev": true } } }, @@ -8834,12 +8858,6 @@ "requires": { "ms": "^2.1.1" } - }, - "ms": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.1.tgz", - "integrity": "sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg==", - "dev": true } } }, @@ -8866,12 +8884,6 @@ "ms": "^2.1.1" } }, - "ms": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.1.tgz", - "integrity": "sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg==", - "dev": true - }, "readable-stream": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.1.1.tgz", @@ -9242,20 +9254,20 @@ } }, "terser": { - "version": "3.14.1", - "resolved": "https://registry.npmjs.org/terser/-/terser-3.14.1.tgz", - "integrity": "sha512-NSo3E99QDbYSMeJaEk9YW2lTg3qS9V0aKGlb+PlOrei1X02r1wSBHCNX/O+yeTRFSWPKPIGj6MqvvdqV4rnVGw==", + "version": "3.16.1", + "resolved": "https://registry.npmjs.org/terser/-/terser-3.16.1.tgz", + "integrity": "sha512-JDJjgleBROeek2iBcSNzOHLKsB/MdDf+E/BOAJ0Tk9r7p9/fVobfv7LMJ/g/k3v9SXdmjZnIlFd5nfn/Rt0Xow==", "dev": true, "requires": { "commander": "~2.17.1", "source-map": "~0.6.1", - "source-map-support": "~0.5.6" + "source-map-support": "~0.5.9" } }, "terser-webpack-plugin": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-1.2.1.tgz", - "integrity": "sha512-GGSt+gbT0oKcMDmPx4SRSfJPE1XaN3kQRWG4ghxKQw9cn5G9x6aCKSsgYdvyM0na9NJ4Drv0RG6jbBByZ5CMjw==", + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-1.2.3.tgz", + "integrity": "sha512-GOK7q85oAb/5kE12fMuLdn2btOS9OBZn4VsecpHDywoUC/jLhSAKOiYo0ezx7ss2EXPMzyEWFoE0s1WLE+4+oA==", "dev": true, "requires": { "cacache": "^11.0.2", @@ -9263,15 +9275,15 @@ "schema-utils": "^1.0.0", "serialize-javascript": "^1.4.0", "source-map": "^0.6.1", - "terser": "^3.8.1", + "terser": "^3.16.1", "webpack-sources": "^1.1.0", "worker-farm": "^1.5.2" } }, "test-exclude": { - "version": "5.0.0", - "resolved": 
"https://registry.npmjs.org/test-exclude/-/test-exclude-5.0.0.tgz", - "integrity": "sha512-bO3Lj5+qFa9YLfYW2ZcXMOV1pmQvw+KS/DpjqhyX6Y6UZ8zstpZJ+mA2ERkXfpOqhxsJlQiLeVXD3Smsrs6oLw==", + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/test-exclude/-/test-exclude-5.1.0.tgz", + "integrity": "sha512-gwf0S2fFsANC55fSeSqpb8BYk6w3FDvwZxfNjeF6FRgvFa43r+7wRiA/Q0IxoRU37wB/LE8IQ4221BsNucTaCA==", "dev": true, "requires": { "arrify": "^1.0.1", @@ -9280,6 +9292,11 @@ "require-main-filename": "^1.0.1" } }, + "tether": { + "version": "1.4.5", + "resolved": "https://registry.npmjs.org/tether/-/tether-1.4.5.tgz", + "integrity": "sha512-fysT1Gug2wbRi7a6waeu39yVDwiNtvwj5m9eRD+qZDSHKNghLo6KqP/U3yM2ap6TNUL2skjXGJaJJTJqoC31vw==" + }, "throat": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/throat/-/throat-4.1.0.tgz", @@ -9525,11 +9542,6 @@ "mime-types": "~2.1.18" } }, - "typed-styles": { - "version": "0.0.7", - "resolved": "https://registry.npmjs.org/typed-styles/-/typed-styles-0.0.7.tgz", - "integrity": "sha512-pzP0PWoZUhsECYjABgCGQlRGL1n7tOHsgwYv3oIiEpJwGhFTuty/YNeduxQYzXXa3Ge5BdT6sHYIQYpl4uJ+5Q==" - }, "typedarray": { "version": "0.0.6", "resolved": "https://registry.npmjs.org/typedarray/-/typedarray-0.0.6.tgz", @@ -9537,9 +9549,9 @@ "dev": true }, "typescript": { - "version": "3.2.4", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.2.4.tgz", - "integrity": "sha512-0RNDbSdEokBeEAkgNbxJ+BLwSManFy9TeXz8uW+48j/xhEXv1ePME60olyzw2XzUqUBNAYFeJadIqAgNqIACwg==", + "version": "3.3.3333", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.3.3333.tgz", + "integrity": "sha512-JjSKsAfuHBE/fB2oZ8NxtRTk5iGcg6hkYXMnZ3Wc+b2RSqejEqTaem11mHASMnFilHrax3sLK0GDzcJrekZYLw==", "dev": true }, "ua-parser-js": { @@ -9833,9 +9845,9 @@ } }, "warning": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/warning/-/warning-4.0.2.tgz", - "integrity": "sha512-wbTp09q/9C+jJn4KKJfJfoS6VleK/Dti0yqWSm6KMvJ4MRCXFQNapHuJXutJIrWV0Cf4AhTdeIe4qdKHR1+Hug==", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/warning/-/warning-3.0.0.tgz", + "integrity": "sha1-MuU3fLVy3kqwR1O9+IIcAe1gW3w=", "requires": { "loose-envify": "^1.0.0" } @@ -9877,15 +9889,15 @@ "dev": true }, "webpack": { - "version": "4.29.0", - "resolved": "https://registry.npmjs.org/webpack/-/webpack-4.29.0.tgz", - "integrity": "sha512-pxdGG0keDBtamE1mNvT5zyBdx+7wkh6mh7uzMOo/uRQ/fhsdj5FXkh/j5mapzs060forql1oXqXN9HJGju+y7w==", + "version": "4.29.5", + "resolved": "https://registry.npmjs.org/webpack/-/webpack-4.29.5.tgz", + "integrity": "sha512-DuWlYUT982c7XVHodrLO9quFbNpVq5FNxLrMUfYUTlgKW0+yPimynYf1kttSQpEneAL1FH3P3OLNgkyImx8qIQ==", "dev": true, "requires": { - "@webassemblyjs/ast": "1.7.11", - "@webassemblyjs/helper-module-context": "1.7.11", - "@webassemblyjs/wasm-edit": "1.7.11", - "@webassemblyjs/wasm-parser": "1.7.11", + "@webassemblyjs/ast": "1.8.3", + "@webassemblyjs/helper-module-context": "1.8.3", + "@webassemblyjs/wasm-edit": "1.8.3", + "@webassemblyjs/wasm-parser": "1.8.3", "acorn": "^6.0.5", "acorn-dynamic-import": "^4.0.0", "ajv": "^6.1.0", @@ -9901,7 +9913,7 @@ "mkdirp": "~0.5.0", "neo-async": "^2.5.0", "node-libs-browser": "^2.0.0", - "schema-utils": "^0.4.4", + "schema-utils": "^1.0.0", "tapable": "^1.1.0", "terser-webpack-plugin": "^1.1.0", "watchpack": "^1.5.0", @@ -9909,27 +9921,17 @@ }, "dependencies": { "acorn": { - "version": "6.0.5", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-6.0.5.tgz", - "integrity": 
"sha512-i33Zgp3XWtmZBMNvCr4azvOFeWVw1Rk6p3hfi3LUDvIFraOMywb1kAtrbi+med14m4Xfpqm3zRZMT+c0FNE7kg==", + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-6.1.0.tgz", + "integrity": "sha512-MW/FjM+IvU9CgBzjO3UIPCE2pyEwUsoFl+VGdczOPEdxfGFjuKny/gN54mOuX7Qxmb9Rg9MCn2oKiSUeW+pjrw==", "dev": true - }, - "schema-utils": { - "version": "0.4.7", - "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-0.4.7.tgz", - "integrity": "sha512-v/iwU6wvwGK8HbU9yi3/nhGzP0yGSuhQMzL6ySiec1FSrZZDkhm4noOSWzrNFo/jEc+SJY6jRTwuwbSXJPDUnQ==", - "dev": true, - "requires": { - "ajv": "^6.1.0", - "ajv-keywords": "^3.1.0" - } } } }, "webpack-cli": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/webpack-cli/-/webpack-cli-3.2.1.tgz", - "integrity": "sha512-jeJveHwz/vwpJ3B8bxEL5a/rVKIpRNJDsKggfKnxuYeohNDW4Y/wB9N/XHJA093qZyS0r6mYL+/crLsIol4WKA==", + "version": "3.2.3", + "resolved": "https://registry.npmjs.org/webpack-cli/-/webpack-cli-3.2.3.tgz", + "integrity": "sha512-Ik3SjV6uJtWIAN5jp5ZuBMWEAaP5E4V78XJ2nI+paFPh8v4HPSwo/myN0r29Xc/6ZKnd2IdrAlpSgNOu2CDQ6Q==", "dev": true, "requires": { "chalk": "^2.4.1", @@ -9937,10 +9939,8 @@ "enhanced-resolve": "^4.1.0", "findup-sync": "^2.0.0", "global-modules": "^1.0.0", - "global-modules-path": "^2.3.0", "import-local": "^2.0.0", "interpret": "^1.1.0", - "lightercollective": "^0.1.0", "loader-utils": "^1.1.0", "supports-color": "^5.5.0", "v8-compile-cache": "^2.0.2", @@ -9959,12 +9959,12 @@ } }, "webpack-dev-middleware": { - "version": "3.4.0", - "resolved": "https://registry.npmjs.org/webpack-dev-middleware/-/webpack-dev-middleware-3.4.0.tgz", - "integrity": "sha512-Q9Iyc0X9dP9bAsYskAVJ/hmIZZQwf/3Sy4xCAZgL5cUkjZmUZLt4l5HpbST/Pdgjn3u6pE7u5OdGd1apgzRujA==", + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/webpack-dev-middleware/-/webpack-dev-middleware-3.6.0.tgz", + "integrity": "sha512-oeXA3m+5gbYbDBGo4SvKpAHJJEGMoekUbHgo1RK7CP1sz7/WOSeu/dWJtSTk+rzDCLkPwQhGocgIq6lQqOyOwg==", "dev": true, "requires": { - "memory-fs": "~0.4.1", + "memory-fs": "^0.4.1", "mime": "^2.3.1", "range-parser": "^1.0.3", "webpack-log": "^2.0.0" @@ -9979,9 +9979,9 @@ } }, "webpack-dev-server": { - "version": "3.1.14", - "resolved": "https://registry.npmjs.org/webpack-dev-server/-/webpack-dev-server-3.1.14.tgz", - "integrity": "sha512-mGXDgz5SlTxcF3hUpfC8hrQ11yhAttuUQWf1Wmb+6zo3x6rb7b9mIfuQvAPLdfDRCGRGvakBWHdHOa0I9p/EVQ==", + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/webpack-dev-server/-/webpack-dev-server-3.2.1.tgz", + "integrity": "sha512-sjuE4mnmx6JOh9kvSbPYw3u/6uxCLHNWfhWaIPwcXWsvWOPN+nc5baq4i9jui3oOBRXGonK9+OI0jVkaz6/rCw==", "dev": true, "requires": { "ansi-html": "0.0.7", @@ -9989,13 +9989,13 @@ "chokidar": "^2.0.0", "compression": "^1.5.2", "connect-history-api-fallback": "^1.3.0", - "debug": "^3.1.0", + "debug": "^4.1.1", "del": "^3.0.0", "express": "^4.16.2", "html-entities": "^1.2.0", - "http-proxy-middleware": "~0.18.0", + "http-proxy-middleware": "^0.19.1", "import-local": "^2.0.0", - "internal-ip": "^3.0.1", + "internal-ip": "^4.2.0", "ip": "^1.1.5", "killable": "^1.0.0", "loglevel": "^1.4.1", @@ -10009,9 +10009,9 @@ "sockjs-client": "1.3.0", "spdy": "^4.0.0", "strip-ansi": "^3.0.0", - "supports-color": "^5.1.0", + "supports-color": "^6.1.0", "url": "^0.11.0", - "webpack-dev-middleware": "3.4.0", + "webpack-dev-middleware": "^3.5.1", "webpack-log": "^2.0.0", "yargs": "12.0.2" }, @@ -10028,6 +10028,15 @@ "integrity": "sha1-1UVjW+HjPFQmScaRc+Xeas+uNN0=", "dev": true }, + "debug": { + "version": "4.1.1", + "resolved": 
"https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", + "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", + "dev": true, + "requires": { + "ms": "^2.1.1" + } + }, "decamelize": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-2.0.0.tgz", @@ -10046,15 +10055,6 @@ "ansi-regex": "^2.0.0" } }, - "supports-color": { - "version": "5.5.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", - "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", - "dev": true, - "requires": { - "has-flag": "^3.0.0" - } - }, "yargs": { "version": "12.0.2", "resolved": "https://registry.npmjs.org/yargs/-/yargs-12.0.2.tgz", @@ -10246,9 +10246,9 @@ "dev": true }, "write-file-atomic": { - "version": "2.4.2", - "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-2.4.2.tgz", - "integrity": "sha512-s0b6vB3xIVRLWywa6X9TOMA7k9zio0TMOsl9ZnDkliA/cfJlpHXAscj0gbHVJiTdIuAYpIyqS5GW91fqm6gG5g==", + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-2.4.1.tgz", + "integrity": "sha512-TGHFeZEZMnv+gBFRfjAcxL5bPHrsGKtnb4qsFAws7/vlh+QfwAaySIw4AXP9ZskTTh5GWu3FLuJhsWVdiJPGvg==", "dev": true, "requires": { "graceful-fs": "^4.1.11", diff --git a/web-console/package.json b/web-console/package.json index 769407559f6..111ad80d3e3 100644 --- a/web-console/package.json +++ b/web-console/package.json @@ -17,9 +17,9 @@ "start": "webpack-dev-server --hot --open" }, "dependencies": { - "@blueprintjs/core": "^3.12.0", - "@types/hjson": "^2.4.0", + "@blueprintjs/core": "1.0.1", "axios": "^0.18.0", + "brace": "^0.11.1", "classnames": "^2.2.6", "d3-array": "^2.0.3", "druid-console": "^0.0.2", @@ -28,16 +28,20 @@ "hjson": "^3.1.2", "lodash.debounce": "^4.0.8", "numeral": "^2.0.6", - "react": "^16.7.0", - "react-dom": "^16.7.0", + "prop-types": "^15.7.2", + "react": "^16.8.3", + "react-ace": "^6.4.0", + "react-addons-css-transition-group": "^15.6.2", + "react-dom": "^16.8.3", "react-router": "^4.3.1", "react-router-dom": "^4.3.1", - "react-table": "^6.8.6", + "react-table": "^6.9.2", "tslib": "^1.9.3" }, "devDependencies": { "@types/classnames": "^2.2.7", "@types/d3-array": "^1.2.4", + "@types/hjson": "^2.4.0", "@types/jest": "^23.3.13", "@types/lodash.debounce": "^4.0.4", "@types/mocha": "^5.2.5", diff --git a/web-console/script/build b/web-console/script/build index 147f0e60f0b..75371226c92 100755 --- a/web-console/script/build +++ b/web-console/script/build @@ -23,6 +23,10 @@ cp -r ./node_modules/druid-console/coordinator-console . cp -r ./node_modules/druid-console/pages . cp ./node_modules/druid-console/index.html . +echo "Copying blueprint assets in..." +sed 's|url("assets|url("/assets|g' "./node_modules/@blueprintjs/core/dist/blueprint.css" > lib/blueprint.css +cp -r "./node_modules/@blueprintjs/core/dist/assets" . + echo "Transpiling ReactTable CSS..." PATH="./target/node:$PATH" ./node_modules/.bin/stylus lib/react-table.styl -o lib/react-table.css diff --git a/web-console/script/clean b/web-console/script/clean index 3464483dbe3..fd51cf86144 100755 --- a/web-console/script/clean +++ b/web-console/script/clean @@ -17,9 +17,10 @@ # limitations under the License. 
 rm -rf \
-  lib/react-table.css \
+  lib/*.css \
   node_modules \
   coordinator-console \
   pages \
   public \
+  assets \
   index.html
diff --git a/web-console/script/cp-to b/web-console/script/cp-to
index 81240ff6108..d2bdeb76447 100755
--- a/web-console/script/cp-to
+++ b/web-console/script/cp-to
@@ -24,3 +24,4 @@ cp -r coordinator-console "$1"
 cp -r old-console "$1"
 cp -r pages "$1"
 cp -r public "$1"
+cp -r assets "$1"
diff --git a/web-console/src/components/auto-form.tsx b/web-console/src/components/auto-form.tsx
index bdbb27a35d7..39e8baa55ce 100644
--- a/web-console/src/components/auto-form.tsx
+++ b/web-console/src/components/auto-form.tsx
@@ -19,20 +19,8 @@ import { resolveSrv } from 'dns';
 import * as React from 'react';
 import axios from 'axios';
-import {
-  FormGroup,
-  Button,
-  InputGroup,
-  Dialog,
-  NumericInput,
-  Classes,
-  Tooltip,
-  AnchorButton,
-  TagInput,
-  Intent,
-  ButtonGroup,
-  HTMLSelect
-} from "@blueprintjs/core";
+import { InputGroup } from "@blueprintjs/core";
+import { HTMLSelect, FormGroup, NumericInput, TagInput } from "../components/filler";

 interface Field {
   name: string;
@@ -67,7 +55,7 @@ export class AutoForm extends React.Component<AutoFormProps, AutoFormState
     const { model, onChange } = this.props;
     return <NumericInput
+      onValueChange={(v: any) => {
+        if (isNaN(v)) return;
         onChange(Object.assign({}, model, { [field.name]: v }));
       }}
@@ -79,7 +67,7 @@ export class AutoForm extends React.Component<AutoFormProps, AutoFormState
     const { model, onChange } = this.props;
     return <NumericInput
+      onValueChange={(v: number) => {
+        if (isNaN(v)) return;
         onChange(Object.assign({}, model, { [field.name]: v }));
       }}
@@ -102,12 +90,14 @@ export class AutoForm extends React.Component<AutoFormProps, AutoFormState
   private renderBooleanInput(field: Field): JSX.Element {
     const { model, onChange } = this.props;
     return <HTMLSelect
+      onChange={(e: any) => {
         onChange(Object.assign({}, model, { [field.name]: e.currentTarget.value === "True" }));
       }}
-    />
+    >
+      <option value="True">True</option>
+      <option value="False">False</option>
+    </HTMLSelect>
   }

   private renderStringArrayInput(field: Field): JSX.Element {
@@ -118,7 +108,7 @@ export class AutoForm extends React.Component<AutoFormProps, AutoFormState
       onChange={(v: any) => {
         onChange(Object.assign({}, model, { [field.name]: v }));
       }}
-      addOnBlur={true}
+      fill
     />;
   }
diff --git a/web-console/src/components/filler.scss b/web-console/src/components/filler.scss
new file mode 100644
index 00000000000..b4ee310d49d
--- /dev/null
+++ b/web-console/src/components/filler.scss
@@ -0,0 +1,27 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+.pt-select {
+  &.pt-fill {
+    flex: 1;
+  }
+}
+
+.form-group {
+  margin: 0 0 15px;
+}
diff --git a/web-console/src/components/filler.tsx b/web-console/src/components/filler.tsx
new file mode 100644
index 00000000000..30809b85295
--- /dev/null
+++ b/web-console/src/components/filler.tsx
@@ -0,0 +1,259 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import { Button } from '@blueprintjs/core';
+import * as React from 'react';
+import classNames from 'classnames';
+import './filler.scss';
+
+
+export const IconNames = {
+  ERROR: "error" as "error",
+  PLUS: "plus" as "plus",
+  REFRESH: "refresh" as "refresh",
+  APPLICATION: "application" as "application",
+  GRAPH: "graph" as "graph",
+  MAP: "map" as "map",
+  TH: "th" as "th",
+  USER: "user" as "user",
+  GIT_BRANCH: "git-branch" as "git-branch",
+  COG: "cog" as "cog",
+  MULTI_SELECT: "multi-select" as "multi-select",
+  STACKED_CHART: "stacked-chart" as "stacked-chart",
+  GANTT_CHART: "gantt-chart" as "gantt-chart",
+  DATABASE: "database" as "database",
+  SETTINGS: "settings" as "settings",
+  HELP: "help" as "help",
+  SHARE: "share" as "share",
+  CROSS: "cross" as "cross",
+  ARROW_LEFT: "arrow-left" as "arrow-left",
+  CARET_RIGHT: "caret-right" as "caret-right",
+  TICK: "tick" as "tick",
+  ARROW_RIGHT: "right-arrow" as "right-arrow",
+  TRASH: "trash" as "trash",
+  CARET_DOWN: "caret-down" as "caret-down",
+  ARROW_UP: "arrow-up" as "arrow-up",
+  ARROW_DOWN: "arrow-down" as "arrow-down",
+};
+export type IconNames = typeof IconNames[keyof typeof IconNames];
+
+export class H5 extends React.Component<{}, {}> {
+  render() {
+    const { children } = this.props;
+    return <h5>
+      {children}
+    </h5>;
+  }
+}
+
+export class Card extends React.Component<{ interactive?: boolean }, {}> {
+  render() {
+    const { interactive, children } = this.props;
+    return <div className={classNames('pt-card', { 'pt-interactive': interactive })}>
+      {children}
+    </div>;
+  }
+}
+
+export class Icon extends React.Component<{ icon: string, color?: string }, {}> {
+  render() {
+    const { color, icon } = this.props;
+    return <span className={classNames('pt-icon-standard', 'pt-icon-' + icon)} style={{ color }}/>;
+  }
+}
+
+export class ControlGroup extends React.Component<{}, {}> {
+  render() {
+    return <div className="pt-control-group" {...this.props}/>;
+  }
+}
+
+export class ButtonGroup extends React.Component<{ vertical?: boolean, fixed?: boolean }, {}> {
+  render() {
+    const { vertical, fixed, children } = this.props;
+    return <div className={classNames('pt-button-group', { 'pt-vertical': vertical, 'pt-fixed': fixed })}>
+      {children}
+    </div>;
+  }
+}
+
+export class Label extends React.Component<{}, {}> {
+  render() {
+    const { children } = this.props;
+    return <label className="pt-label">{children}</label>;
+  }
+}
+
+export class FormGroup extends React.Component<{ className?: string, label?: string }, {}> {
+  render() {
+    const { className, label, children } = this.props;
+    return <div className={classNames('form-group', className)}>
+      { label ? <Label>{label}</Label> : null }
+      {children}
+    </div>;
+  }
+}
+
+export const Alignment = {
+  LEFT: "left" as "left",
+  RIGHT: "right" as "right",
+};
+export type Alignment = typeof Alignment[keyof typeof Alignment];
+
+export class Navbar extends React.Component<{ className?: string }, {}> {
+  render() {
+    const { className, children } = this.props;
+    return <nav className={classNames('pt-navbar', className)}>{children}</nav>;
+  }
+}
+
+export class NavbarGroup extends React.Component<{ align: Alignment }, {}> {
+  render() {
+    const { align, children } = this.props;
+    return <div className={classNames('pt-navbar-group', 'pt-align-' + align)}>
+      {children}
+    </div>;
+  }
+}
+
+export class NavbarDivider extends React.Component<{}, {}> {
+  render() {
+    return <span className="pt-navbar-divider"/>;
+  }
+}
+
+export class HTMLSelect extends React.Component<{ key?: string; style?: any; onChange: any; value: any; fill?: boolean }, {}> {
+  render() {
+    const { key, style, onChange, value, fill, children } = this.props;
+    return <div className={classNames('pt-select', { 'pt-fill': fill })} key={key} style={style}>
+      <select onChange={onChange} value={value}>{children}</select>
+    </div>;
+  }
+}
+
+export class TextArea extends React.Component<{ className?: string; onChange?: any; value?: string }, {}> {
+  render() {
+    const { className, value, onChange } = this.props;
+    return