From 5ccfdcc48b1c3b20738798f16d1f9ea47211b7eb Mon Sep 17 00:00:00 2001
From: praveev
Date: Tue, 28 Feb 2017 10:51:41 -0800
Subject: [PATCH] Fix testDeadlock timeout delay (#3979)

* No more singleton. Reduce iterations

* Granularities

* Fix the delay in the test

* Add license header

* Remove unused imports

* Lot more unused imports from all the rearranging

* CR feedback

* Move javadoc to constructor
---
 .../java/io/druid/timeline/DataSegment.java | 7 +-
 .../druid/benchmark/BoundFilterBenchmark.java | 48 ++--
 .../benchmark/ConciseComplementBenchmark.java | 5 +-
 .../benchmark/FilterPartitionBenchmark.java | 6 +-
 .../FilteredAggregatorBenchmark.java | 6 +-
 .../GroupByTypeInterfaceBenchmark.java | 7 +-
 .../IncrementalIndexRowTypeBenchmark.java | 4 +-
 .../benchmark/TopNTypeInterfaceBenchmark.java | 14 +-
 .../IncrementalIndexReadBenchmark.java | 6 +-
 .../indexing/IndexIngestionBenchmark.java | 4 +-
 .../indexing/IndexMergeBenchmark.java | 4 +-
 .../indexing/IndexPersistBenchmark.java | 4 +-
 .../benchmark/query/GroupByBenchmark.java | 7 +-
 .../benchmark/query/QueryBenchmarkUtil.java | 1 -
 .../benchmark/query/SearchBenchmark.java | 12 +-
 .../benchmark/query/SelectBenchmark.java | 8 +-
 .../druid/benchmark/query/SqlBenchmark.java | 4 +-
 .../benchmark/query/TimeseriesBenchmark.java | 12 +-
 .../druid/benchmark/query/TopNBenchmark.java | 10 +-
 .../DistinctCountGroupByQueryTest.java | 4 +-
 .../DistinctCountTimeseriesQueryTest.java | 4 +-
 .../DistinctCountTopNQueryTest.java | 4 +-
 .../input/orc/OrcIndexGeneratorJobTest.java | 4 +-
 .../io/druid/query/scan/ScanQueryEngine.java | 4 +-
 .../query/scan/MultiSegmentScanQueryTest.java | 4 +-
 .../TimestampAggregationSelectTest.java | 4 +-
 .../TimestampGroupByAggregationTest.java | 4 +-
 .../druid/segment/MapVirtualColumnTest.java | 4 +-
 .../theta/SketchAggregationTest.java | 8 +-
 .../SketchAggregationTestWithSimpleData.java | 6 +-
 .../oldapi/OldApiSketchAggregationTest.java | 6 +-
 .../ApproximateHistogramAggregationTest.java | 4 +-
 .../sql/QuantileSqlAggregatorTest.java | 6 +-
 .../kafka/KafkaIndexTaskClientTest.java | 1 -
 .../indexing/kafka/KafkaIndexTaskTest.java | 6 +-
 .../kafka/supervisor/KafkaSupervisorTest.java | 6 +-
 .../main/java/io/druid/indexer/JobHelper.java | 1 -
 .../hadoop/DatasourceIngestionSpec.java | 3 +-
 .../indexer/BatchDeltaIngestionTest.java | 6 +-
 .../DetermineHashedPartitionsJobTest.java | 6 +-
 .../indexer/DeterminePartitionsJobTest.java | 4 +-
 .../indexer/HadoopDruidIndexerConfigTest.java | 12 +-
 .../indexer/HadoopIngestionSpecTest.java | 4 +-
 ...cUpdateDatasourcePathSpecSegmentsTest.java | 4 +-
 .../indexer/IndexGeneratorCombinerTest.java | 4 +-
 .../druid/indexer/IndexGeneratorJobTest.java | 4 +-
 .../java/io/druid/indexer/JobHelperTest.java | 4 +-
 .../hadoop/DatasourceIngestionSpecTest.java | 6 +-
 .../indexer/path/DatasourcePathSpecTest.java | 4 +-
 .../indexer/path/GranularityPathSpecTest.java | 21 +-
 .../updater/HadoopConverterJobTest.java | 6 +-
 .../IngestSegmentFirehoseFactory.java | 4 +-
 .../actions/SegmentAllocateActionTest.java | 190 ++++++------
 .../indexing/common/task/IndexTaskTest.java | 16 +-
 .../common/task/RealtimeIndexTaskTest.java | 10 +-
 .../indexing/common/task/TaskSerdeTest.java | 12 +-
 .../IngestSegmentFirehoseFactoryTest.java | 4 +-
 ...estSegmentFirehoseFactoryTimelineTest.java | 4 +-
 .../indexing/overlord/TaskLifecycleTest.java | 10 +-
 .../common/granularity/AllGranularity.java | 15 +-
 .../common/granularity/Granularities.java | 44 +++
 .../util/common/granularity/Granularity.java | 246 +---------
 .../common/granularity/GranularityType.java | 262 ++++++++++++++++++
 .../common/granularity/NoneGranularity.java | 15 +-
 .../common/granularity/PeriodGranularity.java | 6 +-
 .../java/util/common/GranularityTest.java | 22 +-
 .../src/main/java/io/druid/query/Druids.java | 7 +-
 .../extraction/TimeFormatExtractionFn.java | 3 +-
 .../io/druid/query/groupby/GroupByQuery.java | 3 +-
 .../query/groupby/GroupByQueryHelper.java | 3 +-
 .../groupby/strategy/GroupByStrategyV2.java | 3 +-
 .../druid/query/metadata/SegmentAnalyzer.java | 4 +-
 .../query/search/search/SearchQuery.java | 3 +-
 .../TimeBoundaryQueryRunnerFactory.java | 4 +-
 .../io/druid/query/topn/TopNQueryBuilder.java | 3 +-
 .../incremental/IncrementalIndexSchema.java | 3 +-
 .../granularity/QueryGranularityTest.java | 89 +++---
 .../druid/query/MultiValuedDimensionTest.java | 12 +-
 .../io/druid/query/QueryRunnerTestHelper.java | 7 +-
 ...ResultGranularTimestampComparatorTest.java | 7 +-
 .../HyperUniquesAggregationTest.java | 6 +-
 ...nalizingFieldAccessPostAggregatorTest.java | 4 +-
 .../DataSourceMetadataQueryTest.java | 4 +-
 .../TimeFormatExtractionFnTest.java | 11 +-
 .../query/groupby/GroupByQueryConfigTest.java | 6 +-
 .../groupby/GroupByQueryMergeBufferTest.java | 22 +-
 .../GroupByQueryRunnerFactoryTest.java | 6 +-
 .../GroupByQueryRunnerFailureTest.java | 16 +-
 .../query/groupby/GroupByQueryRunnerTest.java | 10 +-
 .../GroupByTimeseriesQueryRunnerTest.java | 4 +-
 .../metadata/SegmentMetadataQueryTest.java | 4 +-
 .../query/search/SearchBinaryFnTest.java | 24 +-
 .../search/SearchQueryQueryToolChestTest.java | 4 +-
 .../query/search/SearchQueryRunnerTest.java | 4 +-
 .../select/MultiSegmentSelectQueryTest.java | 4 +-
 .../query/select/SelectBinaryFnTest.java | 6 +-
 .../spec/SpecificSegmentQueryRunnerTest.java | 6 +-
 .../TimeBoundaryQueryRunnerTest.java | 4 +-
 .../timeseries/TimeseriesBinaryFnTest.java | 12 +-
 .../TimeseriesQueryQueryToolChestTest.java | 4 +-
 .../TimeseriesQueryRunnerBonusTest.java | 6 +-
 .../timeseries/TimeseriesQueryRunnerTest.java | 15 +-
 .../query/topn/TopNBinaryFnBenchmark.java | 4 +-
 .../io/druid/query/topn/TopNBinaryFnTest.java | 16 +-
 .../topn/TopNQueryQueryToolChestTest.java | 8 +-
 .../druid/query/topn/TopNQueryRunnerTest.java | 19 +-
 .../java/io/druid/segment/AppendTest.java | 5 +-
 .../java/io/druid/segment/EmptyIndexTest.java | 4 +-
 .../java/io/druid/segment/IndexIOTest.java | 6 +-
 .../io/druid/segment/IndexMergerTest.java | 33 ++-
 .../IndexMergerV9CompatibilityTest.java | 4 +-
 .../IndexMergerV9WithSpatialIndexTest.java | 16 +-
 .../java/io/druid/segment/MetadataTest.java | 12 +-
 .../io/druid/segment/SchemalessIndexTest.java | 8 +-
 .../druid/segment/SchemalessTestFullTest.java | 3 +-
 .../segment/SchemalessTestSimpleTest.java | 3 +-
 .../segment/StringDimensionHandlerTest.java | 8 +-
 .../test/java/io/druid/segment/TestIndex.java | 4 +-
 .../segment/data/IncrementalIndexTest.java | 22 +-
 .../druid/segment/filter/BaseFilterTest.java | 4 +-
 .../filter/SpatialFilterBonusTest.java | 16 +-
 .../segment/filter/SpatialFilterTest.java | 16 +-
 .../IncrementalIndexMultiValueSpecTest.java | 4 +-
 .../IncrementalIndexStorageAdapterTest.java | 18 +-
 .../incremental/IncrementalIndexTest.java | 4 +-
 .../OnheapIncrementalIndexBenchmark.java | 7 +-
 .../OnheapIncrementalIndexTest.java | 6 +-
 .../incremental/TimeAndDimsCompTest.java | 4 +-
 .../granularity/ArbitraryGranularitySpec.java | 3 +-
 .../granularity/UniformGranularitySpec.java | 7 +-
 .../client/CachingClusteredClientTest.java | 3 +-
 .../druid/client/CachingQueryRunnerTest.java | 4 +-
 .../segment/indexing/DataSchemaTest.java | 13 +-
 .../granularity/ArbitraryGranularityTest.java | 12 +-
 .../granularity/UniformGranularityTest.java | 26 +-
 .../segment/realtime/FireDepartmentTest.java | 5 +-
 .../segment/realtime/RealtimeManagerTest.java | 9 +-
 .../appenderator/AppenderatorTest.java | 17 +-
 .../appenderator/AppenderatorTester.java | 4 +-
 ...DefaultOfflineAppenderatorFactoryTest.java | 4 +-
 .../FiniteAppenderatorDriverTest.java | 6 +-
 .../firehose/IngestSegmentFirehoseTest.java | 6 +-
 .../plumber/RealtimePlumberSchoolTest.java | 6 +-
 .../segment/realtime/plumber/SinkTest.java | 5 +-
 .../coordination/ServerManagerTest.java | 27 +-
 .../main/java/io/druid/cli/DumpSegment.java | 9 +-
 .../cli/validate/DruidJsonValidatorTest.java | 4 +-
 .../sql/calcite/rel/DruidQueryBuilder.java | 9 +-
 .../druid/sql/calcite/CalciteQueryTest.java | 230 +++++++--------
 149 files changed, 1129 insertions(+), 1064 deletions(-)
 create mode 100644 java-util/src/main/java/io/druid/java/util/common/granularity/Granularities.java
 create mode 100644 java-util/src/main/java/io/druid/java/util/common/granularity/GranularityType.java

diff --git a/api/src/main/java/io/druid/timeline/DataSegment.java b/api/src/main/java/io/druid/timeline/DataSegment.java
index ab30f0e461c..74322c8c3c6 100644
--- a/api/src/main/java/io/druid/timeline/DataSegment.java
+++ b/api/src/main/java/io/druid/timeline/DataSegment.java
@@ -31,10 +31,9 @@ import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Interner;
 import com.google.common.collect.Interners;
 import com.google.common.collect.Iterables;
-
 import io.druid.jackson.CommaListJoinDeserializer;
 import io.druid.jackson.CommaListJoinSerializer;
-import io.druid.java.util.common.granularity.Granularity;
+import io.druid.java.util.common.granularity.Granularities;
 import io.druid.query.SegmentDescriptor;
 import io.druid.timeline.partition.NoneShardSpec;
 import io.druid.timeline.partition.ShardSpec;
@@ -288,8 +287,8 @@ public class DataSegment implements Comparable<DataSegment>
     {
       int retVal;

-      DateTime lhsMonth = Granularity.MONTH.bucketStart(lhs.getInterval().getStart());
-      DateTime rhsMonth = Granularity.MONTH.bucketStart(rhs.getInterval().getStart());
+      DateTime lhsMonth = Granularities.MONTH.bucketStart(lhs.getInterval().getStart());
+      DateTime rhsMonth = Granularities.MONTH.bucketStart(rhs.getInterval().getStart());

       retVal = lhsMonth.compareTo(rhsMonth);

diff --git a/benchmarks/src/main/java/io/druid/benchmark/BoundFilterBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/BoundFilterBenchmark.java
index 37196edae8e..a8a3679d812 100644
--- a/benchmarks/src/main/java/io/druid/benchmark/BoundFilterBenchmark.java
+++ b/benchmarks/src/main/java/io/druid/benchmark/BoundFilterBenchmark.java
@@ -19,11 +19,25 @@

 package io.druid.benchmark;

-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.concurrent.TimeUnit;
-
+import com.google.common.base.Function;
+import com.google.common.base.Preconditions;
+import com.google.common.collect.FluentIterable;
+import io.druid.collections.bitmap.BitmapFactory;
+import io.druid.collections.bitmap.ImmutableBitmap;
+import io.druid.collections.bitmap.MutableBitmap;
+import io.druid.collections.bitmap.RoaringBitmapFactory;
+import io.druid.collections.spatial.ImmutableRTree;
+import io.druid.extendedset.intset.ConciseSetUtils;
+import io.druid.query.filter.BitmapIndexSelector;
+import io.druid.query.filter.BoundDimFilter;
+import io.druid.query.ordering.StringComparators;
+import
io.druid.segment.column.BitmapIndex; +import io.druid.segment.data.BitmapSerdeFactory; +import io.druid.segment.data.GenericIndexed; +import io.druid.segment.data.Indexed; +import io.druid.segment.data.RoaringBitmapSerdeFactory; +import io.druid.segment.filter.BoundFilter; +import io.druid.segment.serde.BitmapIndexColumnPartSupplier; import org.openjdk.jmh.annotations.Benchmark; import org.openjdk.jmh.annotations.BenchmarkMode; import org.openjdk.jmh.annotations.Fork; @@ -36,26 +50,10 @@ import org.openjdk.jmh.annotations.Setup; import org.openjdk.jmh.annotations.State; import org.openjdk.jmh.annotations.Warmup; -import com.google.common.base.Function; -import com.google.common.base.Preconditions; -import com.google.common.collect.FluentIterable; - -import io.druid.collections.bitmap.BitmapFactory; -import io.druid.collections.bitmap.ImmutableBitmap; -import io.druid.collections.bitmap.MutableBitmap; -import io.druid.collections.bitmap.RoaringBitmapFactory; -import io.druid.collections.spatial.ImmutableRTree; -import io.druid.query.filter.BitmapIndexSelector; -import io.druid.query.filter.BoundDimFilter; -import io.druid.query.ordering.StringComparators; -import io.druid.segment.column.BitmapIndex; -import io.druid.segment.data.BitmapSerdeFactory; -import io.druid.segment.data.GenericIndexed; -import io.druid.segment.data.Indexed; -import io.druid.segment.data.RoaringBitmapSerdeFactory; -import io.druid.segment.filter.BoundFilter; -import io.druid.segment.serde.BitmapIndexColumnPartSupplier; -import io.druid.extendedset.intset.ConciseSetUtils; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.TimeUnit; @State(Scope.Benchmark) @Fork(value = 1) diff --git a/benchmarks/src/main/java/io/druid/benchmark/ConciseComplementBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/ConciseComplementBenchmark.java index 20625eace3e..a31a3f713f3 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/ConciseComplementBenchmark.java +++ b/benchmarks/src/main/java/io/druid/benchmark/ConciseComplementBenchmark.java @@ -20,8 +20,7 @@ package io.druid.benchmark; -import java.util.concurrent.TimeUnit; - +import io.druid.extendedset.intset.ImmutableConciseSet; import org.openjdk.jmh.annotations.Benchmark; import org.openjdk.jmh.annotations.BenchmarkMode; import org.openjdk.jmh.annotations.Mode; @@ -31,7 +30,7 @@ import org.openjdk.jmh.annotations.Scope; import org.openjdk.jmh.annotations.State; import org.openjdk.jmh.infra.Blackhole; -import io.druid.extendedset.intset.ImmutableConciseSet; +import java.util.concurrent.TimeUnit; @State(Scope.Benchmark) public class ConciseComplementBenchmark diff --git a/benchmarks/src/main/java/io/druid/benchmark/FilterPartitionBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/FilterPartitionBenchmark.java index 525165dd48f..62de8735200 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/FilterPartitionBenchmark.java +++ b/benchmarks/src/main/java/io/druid/benchmark/FilterPartitionBenchmark.java @@ -32,7 +32,7 @@ import io.druid.data.input.InputRow; import io.druid.data.input.impl.DimensionsSpec; import io.druid.hll.HyperLogLogHash; import io.druid.jackson.DefaultObjectMapper; -import io.druid.java.util.common.granularity.Granularity; +import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; import io.druid.java.util.common.logger.Logger; @@ -230,7 +230,7 @@ public class FilterPartitionBenchmark 
{ return new OnheapIncrementalIndex( new IncrementalIndexSchema.Builder() - .withQueryGranularity(Granularity.NONE) + .withQueryGranularity(Granularities.NONE) .withMetrics(schemaInfo.getAggsArray()) .withDimensionsSpec(new DimensionsSpec(null, null, null)) .build(), @@ -508,7 +508,7 @@ public class FilterPartitionBenchmark private Sequence makeCursors(StorageAdapter sa, Filter filter) { - return sa.makeCursors(filter, schemaInfo.getDataInterval(), VirtualColumns.EMPTY, Granularity.ALL, false); + return sa.makeCursors(filter, schemaInfo.getDataInterval(), VirtualColumns.EMPTY, Granularities.ALL, false); } private Sequence> readCursors(Sequence cursors, final Blackhole blackhole) diff --git a/benchmarks/src/main/java/io/druid/benchmark/FilteredAggregatorBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/FilteredAggregatorBenchmark.java index 701417a0f2d..4c2537fd9de 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/FilteredAggregatorBenchmark.java +++ b/benchmarks/src/main/java/io/druid/benchmark/FilteredAggregatorBenchmark.java @@ -31,7 +31,7 @@ import io.druid.data.input.InputRow; import io.druid.data.input.impl.DimensionsSpec; import io.druid.hll.HyperLogLogHash; import io.druid.jackson.DefaultObjectMapper; -import io.druid.java.util.common.granularity.Granularity; +import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; import io.druid.java.util.common.logger.Logger; @@ -214,7 +214,7 @@ public class FilteredAggregatorBenchmark query = Druids.newTimeseriesQueryBuilder() .dataSource("blah") - .granularity(Granularity.ALL) + .granularity(Granularities.ALL) .intervals(intervalSpec) .aggregators(queryAggs) .descending(false) @@ -231,7 +231,7 @@ public class FilteredAggregatorBenchmark { return new OnheapIncrementalIndex( new IncrementalIndexSchema.Builder() - .withQueryGranularity(Granularity.NONE) + .withQueryGranularity(Granularities.NONE) .withMetrics(metrics) .withDimensionsSpec(new DimensionsSpec(null, null, null)) .build(), diff --git a/benchmarks/src/main/java/io/druid/benchmark/GroupByTypeInterfaceBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/GroupByTypeInterfaceBenchmark.java index cb97bd237d5..d737508eaff 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/GroupByTypeInterfaceBenchmark.java +++ b/benchmarks/src/main/java/io/druid/benchmark/GroupByTypeInterfaceBenchmark.java @@ -39,6 +39,7 @@ import io.druid.data.input.Row; import io.druid.data.input.impl.DimensionsSpec; import io.druid.hll.HyperLogLogHash; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.granularity.Granularity; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; @@ -257,7 +258,7 @@ public class GroupByTypeInterfaceBenchmark .setAggregatorSpecs( queryAggs ) - .setGranularity(Granularity.DAY) + .setGranularity(Granularities.DAY) .build(); GroupByQuery queryA = GroupByQuery @@ -270,7 +271,7 @@ public class GroupByTypeInterfaceBenchmark .setAggregatorSpecs( queryAggs ) - .setGranularity(Granularity.WEEK) + .setGranularity(Granularities.WEEK) .build(); basicQueries.put("nested", queryA); @@ -434,7 +435,7 @@ public class GroupByTypeInterfaceBenchmark { return new OnheapIncrementalIndex( new IncrementalIndexSchema.Builder() - .withQueryGranularity(Granularity.NONE) + .withQueryGranularity(Granularities.NONE) .withMetrics(schemaInfo.getAggsArray()) 
.withDimensionsSpec(new DimensionsSpec(null, null, null)) .build(), diff --git a/benchmarks/src/main/java/io/druid/benchmark/IncrementalIndexRowTypeBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/IncrementalIndexRowTypeBenchmark.java index babf26b1e0a..4b900568552 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/IncrementalIndexRowTypeBenchmark.java +++ b/benchmarks/src/main/java/io/druid/benchmark/IncrementalIndexRowTypeBenchmark.java @@ -22,7 +22,7 @@ package io.druid.benchmark; import com.google.common.collect.ImmutableMap; import io.druid.data.input.InputRow; import io.druid.data.input.MapBasedInputRow; -import io.druid.java.util.common.granularity.Granularity; +import io.druid.java.util.common.granularity.Granularities; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.CountAggregatorFactory; import io.druid.query.aggregation.DoubleSumAggregatorFactory; @@ -122,7 +122,7 @@ public class IncrementalIndexRowTypeBenchmark { return new OnheapIncrementalIndex( 0, - Granularity.NONE, + Granularities.NONE, aggs, false, false, diff --git a/benchmarks/src/main/java/io/druid/benchmark/TopNTypeInterfaceBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/TopNTypeInterfaceBenchmark.java index 8c029533f89..de8f74111c3 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/TopNTypeInterfaceBenchmark.java +++ b/benchmarks/src/main/java/io/druid/benchmark/TopNTypeInterfaceBenchmark.java @@ -33,7 +33,7 @@ import io.druid.data.input.InputRow; import io.druid.data.input.impl.DimensionsSpec; import io.druid.hll.HyperLogLogHash; import io.druid.jackson.DefaultObjectMapper; -import io.druid.java.util.common.granularity.Granularity; +import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; import io.druid.java.util.common.logger.Logger; @@ -171,7 +171,7 @@ public class TopNTypeInterfaceBenchmark // Use an IdentityExtractionFn to force usage of DimExtractionTopNAlgorithm TopNQueryBuilder queryBuilderString = new TopNQueryBuilder() .dataSource("blah") - .granularity(Granularity.ALL) + .granularity(Granularities.ALL) .dimension(new ExtractionDimensionSpec("dimSequential", "dimSequential", IdentityExtractionFn.getInstance())) .metric("sumFloatNormal") .intervals(intervalSpec) @@ -180,7 +180,7 @@ public class TopNTypeInterfaceBenchmark // DimExtractionTopNAlgorithm is always used for numeric columns TopNQueryBuilder queryBuilderLong = new TopNQueryBuilder() .dataSource("blah") - .granularity(Granularity.ALL) + .granularity(Granularities.ALL) .dimension("metLongUniform") .metric("sumFloatNormal") .intervals(intervalSpec) @@ -188,7 +188,7 @@ public class TopNTypeInterfaceBenchmark TopNQueryBuilder queryBuilderFloat = new TopNQueryBuilder() .dataSource("blah") - .granularity(Granularity.ALL) + .granularity(Granularities.ALL) .dimension("metFloatNormal") .metric("sumFloatNormal") .intervals(intervalSpec) @@ -206,7 +206,7 @@ public class TopNTypeInterfaceBenchmark TopNQueryBuilder queryBuilderA = new TopNQueryBuilder() .dataSource("blah") - .granularity(Granularity.ALL) + .granularity(Granularities.ALL) .dimension("dimUniform") .metric(new DimensionTopNMetricSpec(null, StringComparators.NUMERIC)) .intervals(intervalSpec) @@ -222,7 +222,7 @@ public class TopNTypeInterfaceBenchmark TopNQueryBuilder queryBuilderA = new TopNQueryBuilder() .dataSource("blah") - .granularity(Granularity.ALL) + .granularity(Granularities.ALL) .dimension("dimUniform") 
.metric(new DimensionTopNMetricSpec(null, StringComparators.ALPHANUMERIC)) .intervals(intervalSpec) @@ -316,7 +316,7 @@ public class TopNTypeInterfaceBenchmark { return new OnheapIncrementalIndex( new IncrementalIndexSchema.Builder() - .withQueryGranularity(Granularity.NONE) + .withQueryGranularity(Granularities.NONE) .withMetrics(schemaInfo.getAggsArray()) .withDimensionsSpec(new DimensionsSpec(null, null, null)) .build(), diff --git a/benchmarks/src/main/java/io/druid/benchmark/indexing/IncrementalIndexReadBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/indexing/IncrementalIndexReadBenchmark.java index a366b7200db..9e54a79bcd1 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/indexing/IncrementalIndexReadBenchmark.java +++ b/benchmarks/src/main/java/io/druid/benchmark/indexing/IncrementalIndexReadBenchmark.java @@ -26,7 +26,7 @@ import io.druid.benchmark.datagen.BenchmarkSchemas; import io.druid.data.input.InputRow; import io.druid.data.input.impl.DimensionsSpec; import io.druid.hll.HyperLogLogHash; -import io.druid.java.util.common.granularity.Granularity; +import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; import io.druid.java.util.common.logger.Logger; @@ -125,7 +125,7 @@ public class IncrementalIndexReadBenchmark { return new OnheapIncrementalIndex( new IncrementalIndexSchema.Builder() - .withQueryGranularity(Granularity.NONE) + .withQueryGranularity(Granularities.NONE) .withMetrics(schemaInfo.getAggsArray()) .withDimensionsSpec(new DimensionsSpec(null, null, null)) .withRollup(rollup) @@ -203,7 +203,7 @@ public class IncrementalIndexReadBenchmark filter.toFilter(), schemaInfo.getDataInterval(), VirtualColumns.EMPTY, - Granularity.ALL, + Granularities.ALL, false ); } diff --git a/benchmarks/src/main/java/io/druid/benchmark/indexing/IndexIngestionBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/indexing/IndexIngestionBenchmark.java index e6d2d292010..015fe82b80e 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/indexing/IndexIngestionBenchmark.java +++ b/benchmarks/src/main/java/io/druid/benchmark/indexing/IndexIngestionBenchmark.java @@ -24,8 +24,8 @@ import io.druid.benchmark.datagen.BenchmarkSchemaInfo; import io.druid.benchmark.datagen.BenchmarkSchemas; import io.druid.data.input.InputRow; import io.druid.data.input.impl.DimensionsSpec; -import io.druid.java.util.common.granularity.Granularity; import io.druid.hll.HyperLogLogHash; +import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.logger.Logger; import io.druid.query.aggregation.hyperloglog.HyperUniquesSerde; import io.druid.segment.incremental.IncrementalIndex; @@ -106,7 +106,7 @@ public class IndexIngestionBenchmark { return new OnheapIncrementalIndex( new IncrementalIndexSchema.Builder() - .withQueryGranularity(Granularity.NONE) + .withQueryGranularity(Granularities.NONE) .withMetrics(schemaInfo.getAggsArray()) .withDimensionsSpec(new DimensionsSpec(null, null, null)) .withRollup(rollup) diff --git a/benchmarks/src/main/java/io/druid/benchmark/indexing/IndexMergeBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/indexing/IndexMergeBenchmark.java index 29ce0ef2eaa..3702f464305 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/indexing/IndexMergeBenchmark.java +++ b/benchmarks/src/main/java/io/druid/benchmark/indexing/IndexMergeBenchmark.java @@ -28,7 +28,7 @@ import io.druid.data.input.InputRow; import 
io.druid.data.input.impl.DimensionsSpec; import io.druid.hll.HyperLogLogHash; import io.druid.jackson.DefaultObjectMapper; -import io.druid.java.util.common.granularity.Granularity; +import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.logger.Logger; import io.druid.query.aggregation.hyperloglog.HyperUniquesSerde; import io.druid.segment.IndexIO; @@ -163,7 +163,7 @@ public class IndexMergeBenchmark { return new OnheapIncrementalIndex( new IncrementalIndexSchema.Builder() - .withQueryGranularity(Granularity.NONE) + .withQueryGranularity(Granularities.NONE) .withMetrics(schemaInfo.getAggsArray()) .withDimensionsSpec(new DimensionsSpec(null, null, null)) .withRollup(rollup) diff --git a/benchmarks/src/main/java/io/druid/benchmark/indexing/IndexPersistBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/indexing/IndexPersistBenchmark.java index d7006df309d..6e376261604 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/indexing/IndexPersistBenchmark.java +++ b/benchmarks/src/main/java/io/druid/benchmark/indexing/IndexPersistBenchmark.java @@ -28,7 +28,7 @@ import io.druid.data.input.InputRow; import io.druid.data.input.impl.DimensionsSpec; import io.druid.hll.HyperLogLogHash; import io.druid.jackson.DefaultObjectMapper; -import io.druid.java.util.common.granularity.Granularity; +import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.logger.Logger; import io.druid.query.aggregation.hyperloglog.HyperUniquesSerde; import io.druid.segment.IndexIO; @@ -157,7 +157,7 @@ public class IndexPersistBenchmark { return new OnheapIncrementalIndex( new IncrementalIndexSchema.Builder() - .withQueryGranularity(Granularity.NONE) + .withQueryGranularity(Granularities.NONE) .withMetrics(schemaInfo.getAggsArray()) .withDimensionsSpec(new DimensionsSpec(null, null, null)) .withRollup(rollup) diff --git a/benchmarks/src/main/java/io/druid/benchmark/query/GroupByBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/query/GroupByBenchmark.java index d1ec09782a8..0a1001e8a7c 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/query/GroupByBenchmark.java +++ b/benchmarks/src/main/java/io/druid/benchmark/query/GroupByBenchmark.java @@ -39,6 +39,7 @@ import io.druid.data.input.Row; import io.druid.data.input.impl.DimensionsSpec; import io.druid.hll.HyperLogLogHash; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.granularity.Granularity; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; @@ -219,7 +220,7 @@ public class GroupByBenchmark .setAggregatorSpecs( queryAggs ) - .setGranularity(Granularity.DAY) + .setGranularity(Granularities.DAY) .build(); GroupByQuery queryA = GroupByQuery @@ -232,7 +233,7 @@ public class GroupByBenchmark .setAggregatorSpecs( queryAggs ) - .setGranularity(Granularity.WEEK) + .setGranularity(Granularities.WEEK) .build(); basicQueries.put("nested", queryA); @@ -477,7 +478,7 @@ public class GroupByBenchmark { return new OnheapIncrementalIndex( new IncrementalIndexSchema.Builder() - .withQueryGranularity(Granularity.NONE) + .withQueryGranularity(Granularities.NONE) .withMetrics(schemaInfo.getAggsArray()) .withDimensionsSpec(new DimensionsSpec(null, null, null)) .withRollup(withRollup) diff --git a/benchmarks/src/main/java/io/druid/benchmark/query/QueryBenchmarkUtil.java b/benchmarks/src/main/java/io/druid/benchmark/query/QueryBenchmarkUtil.java index 
5da21c149da..662b0ed71e8 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/query/QueryBenchmarkUtil.java +++ b/benchmarks/src/main/java/io/druid/benchmark/query/QueryBenchmarkUtil.java @@ -20,7 +20,6 @@ package io.druid.benchmark.query; import com.google.common.util.concurrent.ListenableFuture; - import io.druid.java.util.common.guava.Sequence; import io.druid.query.BySegmentQueryRunner; import io.druid.query.FinalizeResultsQueryRunner; diff --git a/benchmarks/src/main/java/io/druid/benchmark/query/SearchBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/query/SearchBenchmark.java index ff438d01eeb..b2584fc9545 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/query/SearchBenchmark.java +++ b/benchmarks/src/main/java/io/druid/benchmark/query/SearchBenchmark.java @@ -33,9 +33,9 @@ import io.druid.concurrent.Execs; import io.druid.data.input.InputRow; import io.druid.data.input.Row; import io.druid.data.input.impl.DimensionsSpec; -import io.druid.java.util.common.granularity.Granularity; import io.druid.hll.HyperLogLogHash; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; import io.druid.java.util.common.logger.Logger; @@ -192,7 +192,7 @@ public class SearchBenchmark return Druids.newSearchQueryBuilder() .dataSource("blah") - .granularity(Granularity.ALL) + .granularity(Granularities.ALL) .intervals(intervalSpec) .query("123"); } @@ -221,7 +221,7 @@ public class SearchBenchmark return Druids.newSearchQueryBuilder() .dataSource("blah") - .granularity(Granularity.ALL) + .granularity(Granularities.ALL) .intervals(intervalSpec) .query("") .dimensions(Lists.newArrayList("dimUniform", "dimHyperUnique")) @@ -275,7 +275,7 @@ public class SearchBenchmark return Druids.newSearchQueryBuilder() .dataSource("blah") - .granularity(Granularity.ALL) + .granularity(Granularities.ALL) .intervals(intervalSpec) .query("") .dimensions(Lists.newArrayList("dimUniform")) @@ -304,7 +304,7 @@ public class SearchBenchmark return Druids.newSearchQueryBuilder() .dataSource("blah") - .granularity(Granularity.ALL) + .granularity(Granularities.ALL) .intervals(intervalSpec) .query("") .dimensions(Lists.newArrayList("dimUniform")) @@ -390,7 +390,7 @@ public class SearchBenchmark { return new OnheapIncrementalIndex( new IncrementalIndexSchema.Builder() - .withQueryGranularity(Granularity.NONE) + .withQueryGranularity(Granularities.NONE) .withMetrics(schemaInfo.getAggsArray()) .withDimensionsSpec(new DimensionsSpec(null, null, null)) .build(), diff --git a/benchmarks/src/main/java/io/druid/benchmark/query/SelectBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/query/SelectBenchmark.java index 551311a96b3..37c17d7f94c 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/query/SelectBenchmark.java +++ b/benchmarks/src/main/java/io/druid/benchmark/query/SelectBenchmark.java @@ -30,9 +30,9 @@ import io.druid.concurrent.Execs; import io.druid.data.input.InputRow; import io.druid.data.input.Row; import io.druid.data.input.impl.DimensionsSpec; -import io.druid.java.util.common.granularity.Granularity; import io.druid.hll.HyperLogLogHash; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; import io.druid.java.util.common.logger.Logger; @@ -161,7 +161,7 @@ public class SelectBenchmark 
.dimensionSpecs(DefaultDimensionSpec.toSpec(Arrays.asList())) .metrics(Arrays.asList()) .intervals(intervalSpec) - .granularity(Granularity.ALL) + .granularity(Granularities.ALL) .descending(false); basicQueries.put("A", queryBuilderA); @@ -247,7 +247,7 @@ public class SelectBenchmark { return new OnheapIncrementalIndex( new IncrementalIndexSchema.Builder() - .withQueryGranularity(Granularity.NONE) + .withQueryGranularity(Granularities.NONE) .withMetrics(schemaInfo.getAggsArray()) .withDimensionsSpec(new DimensionsSpec(null, null, null)) .build(), @@ -271,7 +271,7 @@ public class SelectBenchmark return Sequences.toList(queryResult, Lists.newArrayList()); } - // don't run this benchmark with a query that doesn't use QueryGranularity.ALL, + // don't run this benchmark with a query that doesn't use QueryGranularities.ALL, // this pagination function probably doesn't work correctly in that case. private SelectQuery incrementQueryPagination(SelectQuery query, SelectResultValue prevResult) { diff --git a/benchmarks/src/main/java/io/druid/benchmark/query/SqlBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/query/SqlBenchmark.java index ce877a99d4c..07d4daeb829 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/query/SqlBenchmark.java +++ b/benchmarks/src/main/java/io/druid/benchmark/query/SqlBenchmark.java @@ -29,8 +29,8 @@ import io.druid.benchmark.datagen.BenchmarkSchemas; import io.druid.common.utils.JodaUtils; import io.druid.data.input.InputRow; import io.druid.data.input.Row; -import io.druid.java.util.common.granularity.Granularity; import io.druid.hll.HyperLogLogHash; +import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; import io.druid.java.util.common.logger.Logger; @@ -190,7 +190,7 @@ public class SqlBenchmark ) ) .setAggregatorSpecs(Arrays.asList(new CountAggregatorFactory("c"))) - .setGranularity(Granularity.ALL) + .setGranularity(Granularities.ALL) .build(); sqlQuery = "SELECT\n" diff --git a/benchmarks/src/main/java/io/druid/benchmark/query/TimeseriesBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/query/TimeseriesBenchmark.java index d9bc0124e98..a6a63c8a66c 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/query/TimeseriesBenchmark.java +++ b/benchmarks/src/main/java/io/druid/benchmark/query/TimeseriesBenchmark.java @@ -29,9 +29,9 @@ import io.druid.benchmark.datagen.BenchmarkSchemas; import io.druid.concurrent.Execs; import io.druid.data.input.InputRow; import io.druid.data.input.impl.DimensionsSpec; -import io.druid.java.util.common.granularity.Granularity; import io.druid.hll.HyperLogLogHash; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; import io.druid.java.util.common.logger.Logger; @@ -167,7 +167,7 @@ public class TimeseriesBenchmark TimeseriesQuery queryA = Druids.newTimeseriesQueryBuilder() .dataSource("blah") - .granularity(Granularity.ALL) + .granularity(Granularities.ALL) .intervals(intervalSpec) .aggregators(queryAggs) .descending(false) @@ -187,7 +187,7 @@ public class TimeseriesBenchmark TimeseriesQuery timeFilterQuery = Druids.newTimeseriesQueryBuilder() .dataSource("blah") - .granularity(Granularity.ALL) + .granularity(Granularities.ALL) .intervals(intervalSpec) .aggregators(queryAggs) .descending(false) @@ -207,7 +207,7 @@ public class TimeseriesBenchmark 
TimeseriesQuery timeFilterQuery = Druids.newTimeseriesQueryBuilder() .dataSource("blah") - .granularity(Granularity.ALL) + .granularity(Granularities.ALL) .intervals(intervalSpec) .aggregators(queryAggs) .descending(false) @@ -224,7 +224,7 @@ public class TimeseriesBenchmark TimeseriesQuery timeFilterQuery = Druids.newTimeseriesQueryBuilder() .dataSource("blah") - .granularity(Granularity.ALL) + .granularity(Granularities.ALL) .intervals(intervalSpec) .aggregators(queryAggs) .descending(false) @@ -314,7 +314,7 @@ public class TimeseriesBenchmark { return new OnheapIncrementalIndex( new IncrementalIndexSchema.Builder() - .withQueryGranularity(Granularity.NONE) + .withQueryGranularity(Granularities.NONE) .withMetrics(schemaInfo.getAggsArray()) .withDimensionsSpec(new DimensionsSpec(null, null, null)) .build(), diff --git a/benchmarks/src/main/java/io/druid/benchmark/query/TopNBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/query/TopNBenchmark.java index a85b195835f..5e8bab4957f 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/query/TopNBenchmark.java +++ b/benchmarks/src/main/java/io/druid/benchmark/query/TopNBenchmark.java @@ -30,9 +30,9 @@ import io.druid.collections.StupidPool; import io.druid.concurrent.Execs; import io.druid.data.input.InputRow; import io.druid.data.input.impl.DimensionsSpec; -import io.druid.java.util.common.granularity.Granularity; import io.druid.hll.HyperLogLogHash; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; import io.druid.java.util.common.logger.Logger; @@ -167,7 +167,7 @@ public class TopNBenchmark TopNQueryBuilder queryBuilderA = new TopNQueryBuilder() .dataSource("blah") - .granularity(Granularity.ALL) + .granularity(Granularities.ALL) .dimension("dimSequential") .metric("sumFloatNormal") .intervals(intervalSpec) @@ -183,7 +183,7 @@ public class TopNBenchmark TopNQueryBuilder queryBuilderA = new TopNQueryBuilder() .dataSource("blah") - .granularity(Granularity.ALL) + .granularity(Granularities.ALL) .dimension("dimUniform") .metric(new DimensionTopNMetricSpec(null, StringComparators.NUMERIC)) .intervals(intervalSpec) @@ -199,7 +199,7 @@ public class TopNBenchmark TopNQueryBuilder queryBuilderA = new TopNQueryBuilder() .dataSource("blah") - .granularity(Granularity.ALL) + .granularity(Granularities.ALL) .dimension("dimUniform") .metric(new DimensionTopNMetricSpec(null, StringComparators.ALPHANUMERIC)) .intervals(intervalSpec) @@ -294,7 +294,7 @@ public class TopNBenchmark { return new OnheapIncrementalIndex( new IncrementalIndexSchema.Builder() - .withQueryGranularity(Granularity.NONE) + .withQueryGranularity(Granularities.NONE) .withMetrics(schemaInfo.getAggsArray()) .withDimensionsSpec(new DimensionsSpec(null, null, null)) .build(), diff --git a/extensions-contrib/distinctcount/src/test/java/io/druid/query/aggregation/distinctcount/DistinctCountGroupByQueryTest.java b/extensions-contrib/distinctcount/src/test/java/io/druid/query/aggregation/distinctcount/DistinctCountGroupByQueryTest.java index 531cca4413d..36a8e378a0c 100644 --- a/extensions-contrib/distinctcount/src/test/java/io/druid/query/aggregation/distinctcount/DistinctCountGroupByQueryTest.java +++ b/extensions-contrib/distinctcount/src/test/java/io/druid/query/aggregation/distinctcount/DistinctCountGroupByQueryTest.java @@ -23,7 +23,7 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; 
import io.druid.data.input.MapBasedInputRow; import io.druid.data.input.Row; -import io.druid.java.util.common.granularity.Granularity; +import io.druid.java.util.common.granularity.Granularities; import io.druid.query.QueryRunnerTestHelper; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.CountAggregatorFactory; @@ -57,7 +57,7 @@ public class DistinctCountGroupByQueryTest final GroupByQueryRunnerFactory factory = GroupByQueryRunnerTest.makeQueryRunnerFactory(config); IncrementalIndex index = new OnheapIncrementalIndex( - 0, Granularity.SECOND, new AggregatorFactory[]{new CountAggregatorFactory("cnt")}, 1000 + 0, Granularities.SECOND, new AggregatorFactory[]{new CountAggregatorFactory("cnt")}, 1000 ); String visitor_id = "visitor_id"; String client_type = "client_type"; diff --git a/extensions-contrib/distinctcount/src/test/java/io/druid/query/aggregation/distinctcount/DistinctCountTimeseriesQueryTest.java b/extensions-contrib/distinctcount/src/test/java/io/druid/query/aggregation/distinctcount/DistinctCountTimeseriesQueryTest.java index 678e17afd04..08b1a8bc7a4 100644 --- a/extensions-contrib/distinctcount/src/test/java/io/druid/query/aggregation/distinctcount/DistinctCountTimeseriesQueryTest.java +++ b/extensions-contrib/distinctcount/src/test/java/io/druid/query/aggregation/distinctcount/DistinctCountTimeseriesQueryTest.java @@ -23,7 +23,7 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import io.druid.data.input.MapBasedInputRow; -import io.druid.java.util.common.granularity.Granularity; +import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Sequences; import io.druid.query.Druids; import io.druid.query.QueryRunnerTestHelper; @@ -52,7 +52,7 @@ public class DistinctCountTimeseriesQueryTest TimeseriesQueryEngine engine = new TimeseriesQueryEngine(); IncrementalIndex index = new OnheapIncrementalIndex( - 0, Granularity.SECOND, new AggregatorFactory[]{new CountAggregatorFactory("cnt")}, 1000 + 0, Granularities.SECOND, new AggregatorFactory[]{new CountAggregatorFactory("cnt")}, 1000 ); String visitor_id = "visitor_id"; String client_type = "client_type"; diff --git a/extensions-contrib/distinctcount/src/test/java/io/druid/query/aggregation/distinctcount/DistinctCountTopNQueryTest.java b/extensions-contrib/distinctcount/src/test/java/io/druid/query/aggregation/distinctcount/DistinctCountTopNQueryTest.java index eb801ed06d6..b6ebb11d8bd 100644 --- a/extensions-contrib/distinctcount/src/test/java/io/druid/query/aggregation/distinctcount/DistinctCountTopNQueryTest.java +++ b/extensions-contrib/distinctcount/src/test/java/io/druid/query/aggregation/distinctcount/DistinctCountTopNQueryTest.java @@ -25,7 +25,7 @@ import com.google.common.collect.Lists; import io.druid.collections.StupidPool; import io.druid.data.input.MapBasedInputRow; -import io.druid.java.util.common.granularity.Granularity; +import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Sequences; import io.druid.query.QueryRunnerTestHelper; import io.druid.query.Result; @@ -68,7 +68,7 @@ public class DistinctCountTopNQueryTest ); IncrementalIndex index = new OnheapIncrementalIndex( - 0, Granularity.SECOND, new AggregatorFactory[]{new CountAggregatorFactory("cnt")}, 1000 + 0, Granularities.SECOND, new AggregatorFactory[]{new CountAggregatorFactory("cnt")}, 1000 ); String visitor_id = "visitor_id"; String client_type = "client_type"; diff --git 
a/extensions-contrib/orc-extensions/src/test/java/io/druid/data/input/orc/OrcIndexGeneratorJobTest.java b/extensions-contrib/orc-extensions/src/test/java/io/druid/data/input/orc/OrcIndexGeneratorJobTest.java index 4066691413d..f56479cef5b 100644 --- a/extensions-contrib/orc-extensions/src/test/java/io/druid/data/input/orc/OrcIndexGeneratorJobTest.java +++ b/extensions-contrib/orc-extensions/src/test/java/io/druid/data/input/orc/OrcIndexGeneratorJobTest.java @@ -37,7 +37,7 @@ import io.druid.indexer.HadoopyShardSpec; import io.druid.indexer.IndexGeneratorJob; import io.druid.indexer.JobHelper; import io.druid.indexer.Jobby; -import io.druid.java.util.common.granularity.Granularity; +import io.druid.java.util.common.granularity.Granularities; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.LongSumAggregatorFactory; import io.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory; @@ -189,7 +189,7 @@ public class OrcIndexGeneratorJobTest ), aggs, new UniformGranularitySpec( - Granularity.DAY, Granularity.NONE, ImmutableList.of(this.interval) + Granularities.DAY, Granularities.NONE, ImmutableList.of(this.interval) ), mapper ), diff --git a/extensions-contrib/scan-query/src/main/java/io/druid/query/scan/ScanQueryEngine.java b/extensions-contrib/scan-query/src/main/java/io/druid/query/scan/ScanQueryEngine.java index a439f2fbf12..6253992952c 100644 --- a/extensions-contrib/scan-query/src/main/java/io/druid/query/scan/ScanQueryEngine.java +++ b/extensions-contrib/scan-query/src/main/java/io/druid/query/scan/ScanQueryEngine.java @@ -23,7 +23,7 @@ import com.google.common.base.Preconditions; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import io.druid.java.util.common.ISE; -import io.druid.java.util.common.granularity.Granularity; +import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.BaseSequence; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; @@ -113,7 +113,7 @@ public class ScanQueryEngine filter, intervals.get(0), VirtualColumns.EMPTY, - Granularity.ALL, + Granularities.ALL, query.isDescending() ), new Function>() diff --git a/extensions-contrib/scan-query/src/test/java/io/druid/query/scan/MultiSegmentScanQueryTest.java b/extensions-contrib/scan-query/src/test/java/io/druid/query/scan/MultiSegmentScanQueryTest.java index 3619cb6aaa4..81c485e8f3a 100644 --- a/extensions-contrib/scan-query/src/test/java/io/druid/query/scan/MultiSegmentScanQueryTest.java +++ b/extensions-contrib/scan-query/src/test/java/io/druid/query/scan/MultiSegmentScanQueryTest.java @@ -23,7 +23,7 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; import com.google.common.io.CharSource; import com.google.common.util.concurrent.MoreExecutors; -import io.druid.java.util.common.granularity.Granularity; +import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.MergeSequence; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; @@ -144,7 +144,7 @@ public class MultiSegmentScanQueryTest { final IncrementalIndexSchema schema = new IncrementalIndexSchema.Builder() .withMinTimestamp(new DateTime(minTimeStamp).getMillis()) - .withQueryGranularity(Granularity.HOUR) + .withQueryGranularity(Granularities.HOUR) .withMetrics(TestIndex.METRIC_AGGS) .build(); return new OnheapIncrementalIndex(schema, true, maxRowCount); diff --git 
a/extensions-contrib/time-min-max/src/test/java/io/druid/query/aggregation/TimestampAggregationSelectTest.java b/extensions-contrib/time-min-max/src/test/java/io/druid/query/aggregation/TimestampAggregationSelectTest.java index a597e8a9d61..8ec8362fc00 100644 --- a/extensions-contrib/time-min-max/src/test/java/io/druid/query/aggregation/TimestampAggregationSelectTest.java +++ b/extensions-contrib/time-min-max/src/test/java/io/druid/query/aggregation/TimestampAggregationSelectTest.java @@ -25,7 +25,7 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.io.Resources; -import io.druid.java.util.common.granularity.Granularity; +import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; import io.druid.query.Result; @@ -149,7 +149,7 @@ public class TimestampAggregationSelectTest recordParser, aggregator, 0, - Granularity.MONTH, + Granularities.MONTH, 100, Resources.toString(Resources.getResource("select.json"), Charsets.UTF_8) ); diff --git a/extensions-contrib/time-min-max/src/test/java/io/druid/query/aggregation/TimestampGroupByAggregationTest.java b/extensions-contrib/time-min-max/src/test/java/io/druid/query/aggregation/TimestampGroupByAggregationTest.java index 23c362f034a..42a780a511f 100644 --- a/extensions-contrib/time-min-max/src/test/java/io/druid/query/aggregation/TimestampGroupByAggregationTest.java +++ b/extensions-contrib/time-min-max/src/test/java/io/druid/query/aggregation/TimestampGroupByAggregationTest.java @@ -23,7 +23,7 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; import io.druid.data.input.MapBasedRow; import io.druid.data.input.Row; -import io.druid.java.util.common.granularity.Granularity; +import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; import io.druid.query.groupby.GroupByQueryConfig; @@ -175,7 +175,7 @@ public class TimestampGroupByAggregationTest recordParser, aggregator, 0, - Granularity.MONTH, + Granularities.MONTH, 100, groupBy ); diff --git a/extensions-contrib/virtual-columns/src/test/java/io/druid/segment/MapVirtualColumnTest.java b/extensions-contrib/virtual-columns/src/test/java/io/druid/segment/MapVirtualColumnTest.java index bd8229438c3..b5ff60aa722 100644 --- a/extensions-contrib/virtual-columns/src/test/java/io/druid/segment/MapVirtualColumnTest.java +++ b/extensions-contrib/virtual-columns/src/test/java/io/druid/segment/MapVirtualColumnTest.java @@ -28,7 +28,7 @@ import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.StringInputRowParser; import io.druid.data.input.impl.TimestampSpec; import io.druid.jackson.DefaultObjectMapper; -import io.druid.java.util.common.granularity.Granularity; +import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Sequences; import io.druid.query.Druids; import io.druid.query.QueryRunner; @@ -80,7 +80,7 @@ public class MapVirtualColumnTest final IncrementalIndexSchema schema = new IncrementalIndexSchema.Builder() .withMinTimestamp(new DateTime("2011-01-12T00:00:00.000Z").getMillis()) - .withQueryGranularity(Granularity.NONE) + .withQueryGranularity(Granularities.NONE) .build(); final IncrementalIndex index = new OnheapIncrementalIndex(schema, true, 10000); diff --git 
a/extensions-core/datasketches/src/test/java/io/druid/query/aggregation/datasketches/theta/SketchAggregationTest.java b/extensions-core/datasketches/src/test/java/io/druid/query/aggregation/datasketches/theta/SketchAggregationTest.java index 74360ee176c..e6167c15149 100644 --- a/extensions-core/datasketches/src/test/java/io/druid/query/aggregation/datasketches/theta/SketchAggregationTest.java +++ b/extensions-core/datasketches/src/test/java/io/druid/query/aggregation/datasketches/theta/SketchAggregationTest.java @@ -30,7 +30,7 @@ import com.yahoo.sketches.theta.Sketches; import com.yahoo.sketches.theta.Union; import io.druid.data.input.MapBasedRow; import io.druid.data.input.Row; -import io.druid.java.util.common.granularity.Granularity; +import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; import io.druid.query.aggregation.AggregationTestHelper; @@ -94,7 +94,7 @@ public class SketchAggregationTest readFileFromClasspathAsString("sketch_test_data_record_parser.json"), readFileFromClasspathAsString("sketch_test_data_aggregators.json"), 0, - Granularity.NONE, + Granularities.NONE, 5, readFileFromClasspathAsString("sketch_test_data_group_by_query.json") ); @@ -135,7 +135,7 @@ public class SketchAggregationTest + " }" + "]", 0, - Granularity.NONE, + Granularities.NONE, 5, readFileFromClasspathAsString("simple_test_data_group_by_query.json") ); @@ -333,7 +333,7 @@ public class SketchAggregationTest readFileFromClasspathAsString("simple_test_data_record_parser.json"), readFileFromClasspathAsString("simple_test_data_aggregators.json"), 0, - Granularity.NONE, + Granularities.NONE, 5, readFileFromClasspathAsString("retention_test_data_group_by_query.json") ); diff --git a/extensions-core/datasketches/src/test/java/io/druid/query/aggregation/datasketches/theta/SketchAggregationTestWithSimpleData.java b/extensions-core/datasketches/src/test/java/io/druid/query/aggregation/datasketches/theta/SketchAggregationTestWithSimpleData.java index 2757f69df1a..2a1d33a291a 100644 --- a/extensions-core/datasketches/src/test/java/io/druid/query/aggregation/datasketches/theta/SketchAggregationTestWithSimpleData.java +++ b/extensions-core/datasketches/src/test/java/io/druid/query/aggregation/datasketches/theta/SketchAggregationTestWithSimpleData.java @@ -26,7 +26,7 @@ import com.google.common.collect.Lists; import com.google.common.io.Files; import io.druid.data.input.MapBasedRow; import io.druid.data.input.Row; -import io.druid.java.util.common.granularity.Granularity; +import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; import io.druid.query.Result; @@ -99,7 +99,7 @@ public class SketchAggregationTestWithSimpleData readFileFromClasspathAsString("simple_test_data_aggregators.json"), s1, 0, - Granularity.NONE, + Granularities.NONE, 5000 ); @@ -110,7 +110,7 @@ public class SketchAggregationTestWithSimpleData readFileFromClasspathAsString("simple_test_data_aggregators.json"), s2, 0, - Granularity.NONE, + Granularities.NONE, 5000 ); } diff --git a/extensions-core/datasketches/src/test/java/io/druid/query/aggregation/datasketches/theta/oldapi/OldApiSketchAggregationTest.java b/extensions-core/datasketches/src/test/java/io/druid/query/aggregation/datasketches/theta/oldapi/OldApiSketchAggregationTest.java index ab2df701ea4..7821876e029 100644 --- 
a/extensions-core/datasketches/src/test/java/io/druid/query/aggregation/datasketches/theta/oldapi/OldApiSketchAggregationTest.java +++ b/extensions-core/datasketches/src/test/java/io/druid/query/aggregation/datasketches/theta/oldapi/OldApiSketchAggregationTest.java @@ -23,7 +23,7 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.google.common.io.Files; import io.druid.data.input.MapBasedRow; -import io.druid.java.util.common.granularity.Granularity; +import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; import io.druid.query.aggregation.AggregationTestHelper; @@ -86,7 +86,7 @@ public class OldApiSketchAggregationTest readFileFromClasspathAsString("simple_test_data_record_parser.json"), readFileFromClasspathAsString("oldapi/old_simple_test_data_aggregators.json"), 0, - Granularity.NONE, + Granularities.NONE, 5, readFileFromClasspathAsString("oldapi/old_simple_test_data_group_by_query.json") ); @@ -118,7 +118,7 @@ public class OldApiSketchAggregationTest readFileFromClasspathAsString("sketch_test_data_record_parser.json"), readFileFromClasspathAsString("oldapi/old_sketch_test_data_aggregators.json"), 0, - Granularity.NONE, + Granularities.NONE, 5, readFileFromClasspathAsString("oldapi/old_sketch_test_data_group_by_query.json") ); diff --git a/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramAggregationTest.java b/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramAggregationTest.java index e980acc2543..40ae210f0e9 100644 --- a/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramAggregationTest.java +++ b/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramAggregationTest.java @@ -21,7 +21,7 @@ package io.druid.query.aggregation.histogram; import com.google.common.collect.Lists; import io.druid.data.input.MapBasedRow; -import io.druid.java.util.common.granularity.Granularity; +import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; import io.druid.query.aggregation.AggregationTestHelper; @@ -135,7 +135,7 @@ public class ApproximateHistogramAggregationTest parseSpec, metricSpec, 0, - Granularity.NONE, + Granularities.NONE, 50000, query ); diff --git a/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/sql/QuantileSqlAggregatorTest.java b/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/sql/QuantileSqlAggregatorTest.java index 9a55376bd6e..47698cd8ea6 100644 --- a/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/sql/QuantileSqlAggregatorTest.java +++ b/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/sql/QuantileSqlAggregatorTest.java @@ -23,7 +23,7 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; -import io.druid.java.util.common.granularity.Granularity; +import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Sequences; import io.druid.query.Druids; import io.druid.query.aggregation.AggregatorFactory; @@ -176,7 +176,7 @@ public class QuantileSqlAggregatorTest 
Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) .intervals(new MultipleIntervalSegmentSpec(ImmutableList.of(Filtration.eternity()))) - .granularity(Granularity.ALL) + .granularity(Granularities.ALL) .aggregators(ImmutableList.of( new ApproximateHistogramAggregatorFactory("a0:agg", "m1", null, null, null, null), new ApproximateHistogramAggregatorFactory("a2:agg", "m1", 200, null, null, null), @@ -238,7 +238,7 @@ public class QuantileSqlAggregatorTest Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) .intervals(new MultipleIntervalSegmentSpec(ImmutableList.of(Filtration.eternity()))) - .granularity(Granularity.ALL) + .granularity(Granularities.ALL) .aggregators(ImmutableList.of( new ApproximateHistogramFoldingAggregatorFactory("a0:agg", "hist_m1", null, null, null, null), new ApproximateHistogramFoldingAggregatorFactory("a2:agg", "hist_m1", 200, null, null, null), diff --git a/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIndexTaskClientTest.java b/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIndexTaskClientTest.java index 660ea0d1b7f..396524005b4 100644 --- a/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIndexTaskClientTest.java +++ b/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIndexTaskClientTest.java @@ -36,7 +36,6 @@ import io.druid.indexing.common.TaskLocation; import io.druid.indexing.common.TaskStatus; import io.druid.jackson.DefaultObjectMapper; import io.druid.java.util.common.IAE; - import org.easymock.Capture; import org.easymock.CaptureType; import org.easymock.EasyMockSupport; diff --git a/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIndexTaskTest.java b/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIndexTaskTest.java index dbd65f7f3e8..a778214a537 100644 --- a/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIndexTaskTest.java +++ b/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIndexTaskTest.java @@ -74,7 +74,7 @@ import io.druid.indexing.test.TestDataSegmentKiller; import io.druid.jackson.DefaultObjectMapper; import io.druid.java.util.common.CompressionUtils; import io.druid.java.util.common.ISE; -import io.druid.java.util.common.granularity.Granularity; +import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Sequences; import io.druid.metadata.EntryExistsException; import io.druid.metadata.IndexerSQLMetadataStorageCoordinator; @@ -199,7 +199,7 @@ public class KafkaIndexTaskTest Map.class ), new AggregatorFactory[]{new CountAggregatorFactory("rows")}, - new UniformGranularitySpec(Granularity.DAY, Granularity.NONE, null), + new UniformGranularitySpec(Granularities.DAY, Granularities.NONE, null), objectMapper ); } @@ -1607,7 +1607,7 @@ public class KafkaIndexTaskTest ImmutableList.of( new LongSumAggregatorFactory("rows", "rows") ) - ).granularity(Granularity.ALL) + ).granularity(Granularities.ALL) .intervals("0000/3000") .build(); diff --git a/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/supervisor/KafkaSupervisorTest.java b/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/supervisor/KafkaSupervisorTest.java index 98fe3b8d2e3..d22d1772810 100644 --- 
a/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/supervisor/KafkaSupervisorTest.java +++ b/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/supervisor/KafkaSupervisorTest.java @@ -58,7 +58,7 @@ import io.druid.indexing.overlord.TaskStorage; import io.druid.indexing.overlord.supervisor.SupervisorReport; import io.druid.jackson.DefaultObjectMapper; import io.druid.java.util.common.ISE; -import io.druid.java.util.common.granularity.Granularity; +import io.druid.java.util.common.granularity.Granularities; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.CountAggregatorFactory; import io.druid.segment.indexing.DataSchema; @@ -1749,8 +1749,8 @@ public class KafkaSupervisorTest extends EasyMockSupport ), new AggregatorFactory[]{new CountAggregatorFactory("rows")}, new UniformGranularitySpec( - Granularity.HOUR, - Granularity.NONE, + Granularities.HOUR, + Granularities.NONE, ImmutableList.of() ), objectMapper diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/JobHelper.java b/indexing-hadoop/src/main/java/io/druid/indexer/JobHelper.java index 8cb750422ce..7ab1105ab53 100644 --- a/indexing-hadoop/src/main/java/io/druid/indexer/JobHelper.java +++ b/indexing-hadoop/src/main/java/io/druid/indexer/JobHelper.java @@ -27,7 +27,6 @@ import com.google.common.collect.ImmutableMap; import com.google.common.io.ByteStreams; import com.google.common.io.Files; import com.google.common.io.OutputSupplier; - import io.druid.indexer.updater.HadoopDruidConverterConfig; import io.druid.java.util.common.FileUtils; import io.druid.java.util.common.IAE; diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/hadoop/DatasourceIngestionSpec.java b/indexing-hadoop/src/main/java/io/druid/indexer/hadoop/DatasourceIngestionSpec.java index 38290345c52..66546330fa4 100644 --- a/indexing-hadoop/src/main/java/io/druid/indexer/hadoop/DatasourceIngestionSpec.java +++ b/indexing-hadoop/src/main/java/io/druid/indexer/hadoop/DatasourceIngestionSpec.java @@ -24,6 +24,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; import io.druid.common.utils.JodaUtils; +import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.granularity.Granularity; import io.druid.query.filter.DimFilter; import io.druid.timeline.DataSegment; @@ -76,7 +77,7 @@ public class DatasourceIngestionSpec this.segments = segments; this.filter = filter; - this.granularity = granularity == null ? Granularity.NONE : granularity; + this.granularity = granularity == null ? 
Granularities.NONE : granularity; this.dimensions = dimensions; this.metrics = metrics; diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/BatchDeltaIngestionTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/BatchDeltaIngestionTest.java index 04037f81e24..00300c32ba4 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/BatchDeltaIngestionTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/BatchDeltaIngestionTest.java @@ -35,7 +35,7 @@ import io.druid.data.input.impl.StringInputRowParser; import io.druid.data.input.impl.TimestampSpec; import io.druid.indexer.hadoop.WindowedDataSegment; import io.druid.jackson.DefaultObjectMapper; -import io.druid.java.util.common.granularity.Granularity; +import io.druid.java.util.common.granularity.Granularities; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.LongSumAggregatorFactory; import io.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory; @@ -323,7 +323,7 @@ public class BatchDeltaIngestionTest ImmutableList.of("host"), ImmutableList.of("visited_sum", "unique_hosts"), null, - Granularity.NONE + Granularities.NONE ); List rows = Lists.newArrayList(); @@ -358,7 +358,7 @@ public class BatchDeltaIngestionTest new HyperUniquesAggregatorFactory("unique_hosts", "host2") }, new UniformGranularitySpec( - Granularity.DAY, Granularity.NONE, ImmutableList.of(INTERVAL_FULL) + Granularities.DAY, Granularities.NONE, ImmutableList.of(INTERVAL_FULL) ), MAPPER ), diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/DetermineHashedPartitionsJobTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/DetermineHashedPartitionsJobTest.java index 2edbef925c6..e6579c91c0b 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/DetermineHashedPartitionsJobTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/DetermineHashedPartitionsJobTest.java @@ -27,7 +27,7 @@ import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.StringInputRowParser; import io.druid.data.input.impl.TimestampSpec; import io.druid.indexer.partitions.HashedPartitionsSpec; -import io.druid.java.util.common.granularity.Granularity; +import io.druid.java.util.common.granularity.Granularities; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.DoubleSumAggregatorFactory; import io.druid.segment.indexing.DataSchema; @@ -137,8 +137,8 @@ public class DetermineHashedPartitionsJobTest ), new AggregatorFactory[]{new DoubleSumAggregatorFactory("index", "index")}, new UniformGranularitySpec( - Granularity.DAY, - Granularity.NONE, + Granularities.DAY, + Granularities.NONE, ImmutableList.of(new Interval(interval)) ), HadoopDruidIndexerConfig.JSON_MAPPER diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/DeterminePartitionsJobTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/DeterminePartitionsJobTest.java index 0802b92e78f..9fc2e8eb040 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/DeterminePartitionsJobTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/DeterminePartitionsJobTest.java @@ -27,7 +27,7 @@ import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.StringInputRowParser; import io.druid.data.input.impl.TimestampSpec; import io.druid.indexer.partitions.SingleDimensionPartitionsSpec; -import io.druid.java.util.common.granularity.Granularity; +import io.druid.java.util.common.granularity.Granularities; import io.druid.query.aggregation.AggregatorFactory; import 
io.druid.query.aggregation.LongSumAggregatorFactory; import io.druid.segment.indexing.DataSchema; @@ -235,7 +235,7 @@ public class DeterminePartitionsJobTest ), new AggregatorFactory[]{new LongSumAggregatorFactory("visited_num", "visited_num")}, new UniformGranularitySpec( - Granularity.DAY, Granularity.NONE, ImmutableList.of(new Interval(interval)) + Granularities.DAY, Granularities.NONE, ImmutableList.of(new Interval(interval)) ), HadoopDruidIndexerConfig.JSON_MAPPER ), diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/HadoopDruidIndexerConfigTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/HadoopDruidIndexerConfigTest.java index a6e4b98f039..bf6e75c4235 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/HadoopDruidIndexerConfigTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/HadoopDruidIndexerConfigTest.java @@ -27,7 +27,7 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import io.druid.data.input.MapBasedInputRow; import io.druid.jackson.DefaultObjectMapper; -import io.druid.java.util.common.granularity.Granularity; +import io.druid.java.util.common.granularity.Granularities; import io.druid.query.aggregation.AggregatorFactory; import io.druid.segment.indexing.DataSchema; import io.druid.segment.indexing.granularity.UniformGranularitySpec; @@ -202,8 +202,8 @@ public class HadoopDruidIndexerConfigTest null, new AggregatorFactory[0], new UniformGranularitySpec( - Granularity.MINUTE, - Granularity.MINUTE, + Granularities.MINUTE, + Granularities.MINUTE, ImmutableList.of(new Interval("2010-01-01/P1D")) ), jsonMapper @@ -244,7 +244,7 @@ public class HadoopDruidIndexerConfigTest ); final long timestamp = new DateTime("2010-01-01T01:00:01").getMillis(); final Bucket expectedBucket = config.getBucket(new MapBasedInputRow(timestamp, dims, values)).get(); - final long nextBucketTimestamp = Granularity.MINUTE.bucketEnd(new DateTime(timestamp)).getMillis(); + final long nextBucketTimestamp = Granularities.MINUTE.bucketEnd(new DateTime(timestamp)).getMillis(); // check that all rows having same set of dims and truncated timestamp hash to same bucket for (int i = 0; timestamp + i < nextBucketTimestamp; i++) { Assert.assertEquals( @@ -264,8 +264,8 @@ public class HadoopDruidIndexerConfigTest null, new AggregatorFactory[0], new UniformGranularitySpec( - Granularity.MINUTE, - Granularity.MINUTE, + Granularities.MINUTE, + Granularities.MINUTE, ImmutableList.of(new Interval("2010-01-01/P1D")) ), jsonMapper diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/HadoopIngestionSpecTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/HadoopIngestionSpecTest.java index be9dabcad94..1750bae6b8a 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/HadoopIngestionSpecTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/HadoopIngestionSpecTest.java @@ -30,7 +30,7 @@ import io.druid.indexer.partitions.PartitionsSpec; import io.druid.indexer.partitions.SingleDimensionPartitionsSpec; import io.druid.indexer.updater.MetadataStorageUpdaterJobSpec; import io.druid.jackson.DefaultObjectMapper; -import io.druid.java.util.common.granularity.Granularity; +import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.granularity.PeriodGranularity; import io.druid.metadata.MetadataStorageConnectorConfig; import io.druid.segment.indexing.granularity.UniformGranularitySpec; @@ -84,7 +84,7 @@ public class HadoopIngestionSpecTest Assert.assertEquals( "getSegmentGranularity", - 
Granularity.HOUR, + Granularities.HOUR, granularitySpec.getSegmentGranularity() ); } diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest.java index c1782d16548..4ab0b9f30a3 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest.java @@ -32,7 +32,7 @@ import io.druid.indexer.path.PathSpec; import io.druid.indexer.path.StaticPathSpec; import io.druid.indexer.path.UsedSegmentLister; import io.druid.jackson.DefaultObjectMapper; -import io.druid.java.util.common.granularity.Granularity; +import io.druid.java.util.common.granularity.Granularities; import io.druid.query.aggregation.AggregatorFactory; import io.druid.segment.indexing.DataSchema; import io.druid.segment.indexing.granularity.UniformGranularitySpec; @@ -235,7 +235,7 @@ public class HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest null, new AggregatorFactory[0], new UniformGranularitySpec( - Granularity.DAY, + Granularities.DAY, null, ImmutableList.of( new Interval("2010-01-01/P1D") diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/IndexGeneratorCombinerTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/IndexGeneratorCombinerTest.java index 1c62ed58d19..087c13d790c 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/IndexGeneratorCombinerTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/IndexGeneratorCombinerTest.java @@ -28,7 +28,7 @@ import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.StringInputRowParser; import io.druid.data.input.impl.TimeAndDimsParseSpec; import io.druid.data.input.impl.TimestampSpec; -import io.druid.java.util.common.granularity.Granularity; +import io.druid.java.util.common.granularity.Granularities; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.LongSumAggregatorFactory; import io.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory; @@ -78,7 +78,7 @@ public class IndexGeneratorCombinerTest new HyperUniquesAggregatorFactory("unique_hosts", "host") }, new UniformGranularitySpec( - Granularity.DAY, Granularity.NONE, ImmutableList.of(Interval.parse("2010/2011")) + Granularities.DAY, Granularities.NONE, ImmutableList.of(Interval.parse("2010/2011")) ), HadoopDruidIndexerConfig.JSON_MAPPER ), diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/IndexGeneratorJobTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/IndexGeneratorJobTest.java index 37dd5b72f8a..250b9dd8b04 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/IndexGeneratorJobTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/IndexGeneratorJobTest.java @@ -32,7 +32,7 @@ import io.druid.data.input.impl.InputRowParser; import io.druid.data.input.impl.JSONParseSpec; import io.druid.data.input.impl.StringInputRowParser; import io.druid.data.input.impl.TimestampSpec; -import io.druid.java.util.common.granularity.Granularity; +import io.druid.java.util.common.granularity.Granularities; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.CountAggregatorFactory; import io.druid.query.aggregation.LongSumAggregatorFactory; @@ -490,7 +490,7 @@ public class IndexGeneratorJobTest ), aggs, new UniformGranularitySpec( - 
Granularity.DAY, Granularity.NONE, ImmutableList.of(this.interval) + Granularities.DAY, Granularities.NONE, ImmutableList.of(this.interval) ), mapper ), diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/JobHelperTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/JobHelperTest.java index a16369261ff..8af6c470ff6 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/JobHelperTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/JobHelperTest.java @@ -25,7 +25,7 @@ import io.druid.data.input.impl.CSVParseSpec; import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.StringInputRowParser; import io.druid.data.input.impl.TimestampSpec; -import io.druid.java.util.common.granularity.Granularity; +import io.druid.java.util.common.granularity.Granularities; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.LongSumAggregatorFactory; import io.druid.segment.indexing.DataSchema; @@ -85,7 +85,7 @@ public class JobHelperTest ), new AggregatorFactory[]{new LongSumAggregatorFactory("visited_num", "visited_num")}, new UniformGranularitySpec( - Granularity.DAY, Granularity.NONE, ImmutableList.of(this.interval) + Granularities.DAY, Granularities.NONE, ImmutableList.of(this.interval) ), HadoopDruidIndexerConfig.JSON_MAPPER ), diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/hadoop/DatasourceIngestionSpecTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/hadoop/DatasourceIngestionSpecTest.java index 8f5632996e9..3c99203d933 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/hadoop/DatasourceIngestionSpecTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/hadoop/DatasourceIngestionSpecTest.java @@ -22,7 +22,7 @@ package io.druid.indexer.hadoop; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; -import io.druid.java.util.common.granularity.Granularity; +import io.druid.java.util.common.granularity.Granularities; import io.druid.query.filter.SelectorDimFilter; import io.druid.segment.TestHelper; import io.druid.timeline.DataSegment; @@ -49,7 +49,7 @@ public class DatasourceIngestionSpecTest null, null, new SelectorDimFilter("dim", "value", null), - Granularity.DAY, + Granularities.DAY, Lists.newArrayList("d1", "d2"), Lists.newArrayList("m1", "m2", "m3"), false @@ -133,7 +133,7 @@ public class DatasourceIngestionSpecTest ) ), new SelectorDimFilter("dim", "value", null), - Granularity.DAY, + Granularities.DAY, Lists.newArrayList("d1", "d2"), Lists.newArrayList("m1", "m2", "m3"), true diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/path/DatasourcePathSpecTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/path/DatasourcePathSpecTest.java index 8f47a8263d4..1766c73b5d6 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/path/DatasourcePathSpecTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/path/DatasourcePathSpecTest.java @@ -44,7 +44,7 @@ import io.druid.indexer.hadoop.WindowedDataSegment; import io.druid.initialization.Initialization; import io.druid.jackson.DefaultObjectMapper; import io.druid.java.util.common.ISE; -import io.druid.java.util.common.granularity.Granularity; +import io.druid.java.util.common.granularity.Granularities; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.LongSumAggregatorFactory; import io.druid.segment.indexing.DataSchema; @@ -276,7 +276,7 @@ public class DatasourcePathSpecTest new 
LongSumAggregatorFactory("visited_sum", "visited") }, new UniformGranularitySpec( - Granularity.DAY, Granularity.NONE, ImmutableList.of(Interval.parse("2000/3000")) + Granularities.DAY, Granularities.NONE, ImmutableList.of(Interval.parse("2000/3000")) ), HadoopDruidIndexerConfig.JSON_MAPPER ), diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/path/GranularityPathSpecTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/path/GranularityPathSpecTest.java index 2762430a471..ba86ce2708d 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/path/GranularityPathSpecTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/path/GranularityPathSpecTest.java @@ -29,6 +29,7 @@ import io.druid.indexer.HadoopIOConfig; import io.druid.indexer.HadoopIngestionSpec; import io.druid.indexer.HadoopTuningConfig; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.granularity.Granularity; import io.druid.java.util.common.granularity.PeriodGranularity; import io.druid.query.aggregation.AggregatorFactory; @@ -119,7 +120,7 @@ public class GranularityPathSpecTest @Test public void testSetDataGranularity() { - Granularity granularity = Granularity.DAY; + Granularity granularity = Granularities.DAY; granularityPathSpec.setDataGranularity(granularity); Assert.assertEquals(granularity, granularityPathSpec.getDataGranularity()); } @@ -127,13 +128,13 @@ public class GranularityPathSpecTest @Test public void testSerdeCustomInputFormat() throws Exception { - testSerde("/test/path", "*.test", "pat_pat", Granularity.SECOND, TextInputFormat.class); + testSerde("/test/path", "*.test", "pat_pat", Granularities.SECOND, TextInputFormat.class); } @Test public void testSerdeNoInputFormat() throws Exception { - testSerde("/test/path", "*.test", "pat_pat", Granularity.SECOND, null); + testSerde("/test/path", "*.test", "pat_pat", Granularities.SECOND, null); } @Test @@ -146,8 +147,8 @@ public class GranularityPathSpecTest null, new AggregatorFactory[0], new UniformGranularitySpec( - Granularity.DAY, - Granularity.MINUTE, + Granularities.DAY, + Granularities.MINUTE, ImmutableList.of(new Interval("2015-11-06T00:00Z/2015-11-07T00:00Z")) ), jsonMapper @@ -156,7 +157,7 @@ public class GranularityPathSpecTest DEFAULT_TUNING_CONFIG ); - granularityPathSpec.setDataGranularity(Granularity.HOUR); + granularityPathSpec.setDataGranularity(Granularities.HOUR); granularityPathSpec.setFilePattern(".*"); granularityPathSpec.setInputFormat(TextInputFormat.class); @@ -197,8 +198,8 @@ public class GranularityPathSpecTest null, new AggregatorFactory[0], new UniformGranularitySpec( - Granularity.DAY, - Granularity.ALL, + Granularities.DAY, + Granularities.ALL, ImmutableList.of(new Interval("2015-01-01T11Z/2015-01-02T05Z")) ), jsonMapper @@ -207,7 +208,7 @@ public class GranularityPathSpecTest DEFAULT_TUNING_CONFIG ); - granularityPathSpec.setDataGranularity(Granularity.HOUR); + granularityPathSpec.setDataGranularity(Granularities.HOUR); granularityPathSpec.setPathFormat("yyyy/MM/dd/HH"); granularityPathSpec.setFilePattern(".*"); granularityPathSpec.setInputFormat(TextInputFormat.class); @@ -243,7 +244,7 @@ public class GranularityPathSpecTest { final PeriodGranularity pt2S = new PeriodGranularity(new Period("PT2S"), null, DateTimeZone.UTC); Assert.assertNotEquals("\"SECOND\"", jsonMapper.writeValueAsString(pt2S)); - final Granularity pt1S = Granularity.SECOND; + final Granularity pt1S = Granularities.SECOND; Assert.assertEquals("\"SECOND\"", 
jsonMapper.writeValueAsString(pt1S)); } diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/updater/HadoopConverterJobTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/updater/HadoopConverterJobTest.java index bffee97d06c..4b9adba54c4 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/updater/HadoopConverterJobTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/updater/HadoopConverterJobTest.java @@ -42,7 +42,7 @@ import io.druid.indexer.JobHelper; import io.druid.indexer.Jobby; import io.druid.indexer.SQLMetadataStorageUpdaterJobHandler; import io.druid.java.util.common.FileUtils; -import io.druid.java.util.common.granularity.Granularity; +import io.druid.java.util.common.granularity.Granularities; import io.druid.metadata.MetadataSegmentManagerConfig; import io.druid.metadata.MetadataStorageConnectorConfig; import io.druid.metadata.MetadataStorageTablesConfig; @@ -175,8 +175,8 @@ public class HadoopConverterJobTest new HyperUniquesAggregatorFactory("quality_uniques", "quality") }, new UniformGranularitySpec( - Granularity.MONTH, - Granularity.DAY, + Granularities.MONTH, + Granularities.DAY, ImmutableList.of(interval) ), HadoopDruidIndexerConfig.JSON_MAPPER diff --git a/indexing-service/src/main/java/io/druid/indexing/firehose/IngestSegmentFirehoseFactory.java b/indexing-service/src/main/java/io/druid/indexing/firehose/IngestSegmentFirehoseFactory.java index ac9fe89d5eb..351fa1b9b65 100644 --- a/indexing-service/src/main/java/io/druid/indexing/firehose/IngestSegmentFirehoseFactory.java +++ b/indexing-service/src/main/java/io/druid/indexing/firehose/IngestSegmentFirehoseFactory.java @@ -34,11 +34,11 @@ import com.metamx.emitter.EmittingLogger; import io.druid.data.input.Firehose; import io.druid.data.input.FirehoseFactory; import io.druid.data.input.impl.InputRowParser; -import io.druid.java.util.common.granularity.Granularity; import io.druid.indexing.common.TaskToolbox; import io.druid.indexing.common.TaskToolboxFactory; import io.druid.indexing.common.actions.SegmentListUsedAction; import io.druid.indexing.common.task.NoopTask; +import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.parsers.ParseException; import io.druid.query.filter.DimFilter; import io.druid.segment.IndexIO; @@ -274,7 +274,7 @@ public class IngestSegmentFirehoseFactory implements FirehoseFactoryof( new LongSumAggregatorFactory(metric, metric) ) - ).granularity(Granularity.ALL) + ).granularity(Granularities.ALL) .intervals("2000/3000") .build(); diff --git a/indexing-service/src/test/java/io/druid/indexing/common/task/TaskSerdeTest.java b/indexing-service/src/test/java/io/druid/indexing/common/task/TaskSerdeTest.java index f23f4f392ef..3f26def76bc 100644 --- a/indexing-service/src/test/java/io/druid/indexing/common/task/TaskSerdeTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/common/task/TaskSerdeTest.java @@ -30,7 +30,7 @@ import io.druid.guice.FirehoseModule; import io.druid.indexer.HadoopIOConfig; import io.druid.indexer.HadoopIngestionSpec; import io.druid.indexing.common.TestUtils; -import io.druid.java.util.common.granularity.Granularity; +import io.druid.java.util.common.granularity.Granularities; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.CountAggregatorFactory; import io.druid.query.aggregation.DoubleSumAggregatorFactory; @@ -180,7 +180,7 @@ public class TaskSerdeTest null, new AggregatorFactory[]{new DoubleSumAggregatorFactory("met", "met")}, new UniformGranularitySpec( - 
Granularity.DAY, + Granularities.DAY, null, ImmutableList.of(new Interval("2010-01-01/P2D")) ), @@ -245,7 +245,7 @@ public class TaskSerdeTest null, new AggregatorFactory[]{new DoubleSumAggregatorFactory("met", "met")}, new UniformGranularitySpec( - Granularity.DAY, + Granularities.DAY, null, ImmutableList.of(new Interval("2010-01-01/P2D")) ), @@ -436,7 +436,7 @@ public class TaskSerdeTest "foo", null, new AggregatorFactory[0], - new UniformGranularitySpec(Granularity.HOUR, Granularity.NONE, null), + new UniformGranularitySpec(Granularities.HOUR, Granularities.NONE, null), jsonMapper ), new RealtimeIOConfig( @@ -487,7 +487,7 @@ public class TaskSerdeTest .getTuningConfig().getWindowPeriod() ); Assert.assertEquals( - Granularity.HOUR, + Granularities.HOUR, task.getRealtimeIngestionSchema().getDataSchema().getGranularitySpec().getSegmentGranularity() ); Assert.assertTrue(task.getRealtimeIngestionSchema().getTuningConfig().isReportParseExceptions()); @@ -723,7 +723,7 @@ public class TaskSerdeTest new HadoopIngestionSpec( new DataSchema( "foo", null, new AggregatorFactory[0], new UniformGranularitySpec( - Granularity.DAY, + Granularities.DAY, null, ImmutableList.of(new Interval("2010-01-01/P1D")) ), diff --git a/indexing-service/src/test/java/io/druid/indexing/firehose/IngestSegmentFirehoseFactoryTest.java b/indexing-service/src/test/java/io/druid/indexing/firehose/IngestSegmentFirehoseFactoryTest.java index 378a6da07a2..b7431f6f3ab 100644 --- a/indexing-service/src/test/java/io/druid/indexing/firehose/IngestSegmentFirehoseFactoryTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/firehose/IngestSegmentFirehoseFactoryTest.java @@ -41,7 +41,6 @@ import io.druid.data.input.impl.JSONParseSpec; import io.druid.data.input.impl.MapInputRowParser; import io.druid.data.input.impl.SpatialDimensionSchema; import io.druid.data.input.impl.TimestampSpec; -import io.druid.java.util.common.granularity.Granularity; import io.druid.guice.GuiceAnnotationIntrospector; import io.druid.guice.GuiceInjectableValues; import io.druid.guice.GuiceInjectors; @@ -55,6 +54,7 @@ import io.druid.indexing.common.config.TaskStorageConfig; import io.druid.indexing.overlord.HeapMemoryTaskStorage; import io.druid.indexing.overlord.TaskLockbox; import io.druid.indexing.overlord.supervisor.SupervisorManager; +import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.logger.Logger; import io.druid.metadata.IndexerSQLMetadataStorageCoordinator; import io.druid.query.aggregation.AggregatorFactory; @@ -132,7 +132,7 @@ public class IngestSegmentFirehoseFactoryTest } ); final IncrementalIndexSchema schema = new IncrementalIndexSchema.Builder() - .withQueryGranularity(Granularity.NONE) + .withQueryGranularity(Granularities.NONE) .withMinTimestamp(JodaUtils.MIN_INSTANT) .withDimensionsSpec(ROW_PARSER) .withMetrics( diff --git a/indexing-service/src/test/java/io/druid/indexing/firehose/IngestSegmentFirehoseFactoryTimelineTest.java b/indexing-service/src/test/java/io/druid/indexing/firehose/IngestSegmentFirehoseFactoryTimelineTest.java index 14121db11a8..165b29fc7fe 100644 --- a/indexing-service/src/test/java/io/druid/indexing/firehose/IngestSegmentFirehoseFactoryTimelineTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/firehose/IngestSegmentFirehoseFactoryTimelineTest.java @@ -40,7 +40,6 @@ import io.druid.data.input.impl.InputRowParser; import io.druid.data.input.impl.JSONParseSpec; import io.druid.data.input.impl.MapInputRowParser; import 
io.druid.data.input.impl.TimestampSpec; -import io.druid.java.util.common.granularity.Granularity; import io.druid.indexing.common.SegmentLoaderFactory; import io.druid.indexing.common.TaskToolboxFactory; import io.druid.indexing.common.TestUtils; @@ -50,6 +49,7 @@ import io.druid.indexing.common.actions.TaskActionClient; import io.druid.indexing.common.actions.TaskActionClientFactory; import io.druid.indexing.common.config.TaskConfig; import io.druid.indexing.common.task.Task; +import io.druid.java.util.common.granularity.Granularities; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.LongSumAggregatorFactory; import io.druid.query.filter.NoopDimFilter; @@ -211,7 +211,7 @@ public class IngestSegmentFirehoseFactoryTimelineTest { final File persistDir = new File(tmpDir, UUID.randomUUID().toString()); final IncrementalIndexSchema schema = new IncrementalIndexSchema.Builder() - .withQueryGranularity(Granularity.NONE) + .withQueryGranularity(Granularities.NONE) .withMinTimestamp(JodaUtils.MIN_INSTANT) .withDimensionsSpec(ROW_PARSER) .withMetrics( diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/TaskLifecycleTest.java b/indexing-service/src/test/java/io/druid/indexing/overlord/TaskLifecycleTest.java index 0ba9aa69f51..a048c066396 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/TaskLifecycleTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/TaskLifecycleTest.java @@ -70,7 +70,7 @@ import io.druid.indexing.test.TestIndexerMetadataStorageCoordinator; import io.druid.jackson.DefaultObjectMapper; import io.druid.java.util.common.ISE; import io.druid.java.util.common.Pair; -import io.druid.java.util.common.granularity.Granularity; +import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Comparators; import io.druid.metadata.SQLMetadataStorageActionHandlerFactory; import io.druid.metadata.TestDerbyConnector; @@ -646,7 +646,7 @@ public class TaskLifecycleTest null, new AggregatorFactory[]{new DoubleSumAggregatorFactory("met", "met")}, new UniformGranularitySpec( - Granularity.DAY, + Granularities.DAY, null, ImmutableList.of(new Interval("2010-01-01/P2D")) ), @@ -704,7 +704,7 @@ public class TaskLifecycleTest null, new AggregatorFactory[]{new DoubleSumAggregatorFactory("met", "met")}, new UniformGranularitySpec( - Granularity.DAY, + Granularities.DAY, null, ImmutableList.of(new Interval("2010-01-01/P1D")) ), @@ -1063,7 +1063,7 @@ public class TaskLifecycleTest null, new AggregatorFactory[]{new DoubleSumAggregatorFactory("met", "met")}, new UniformGranularitySpec( - Granularity.DAY, + Granularities.DAY, null, ImmutableList.of(new Interval("2010-01-01/P2D")) ), @@ -1168,7 +1168,7 @@ public class TaskLifecycleTest "test_ds", null, new AggregatorFactory[]{new LongSumAggregatorFactory("count", "rows")}, - new UniformGranularitySpec(Granularity.DAY, Granularity.NONE, null), + new UniformGranularitySpec(Granularities.DAY, Granularities.NONE, null), mapper ); RealtimeIOConfig realtimeIOConfig = new RealtimeIOConfig( diff --git a/java-util/src/main/java/io/druid/java/util/common/granularity/AllGranularity.java b/java-util/src/main/java/io/druid/java/util/common/granularity/AllGranularity.java index 480401ffab2..59a84ae6ed1 100644 --- a/java-util/src/main/java/io/druid/java/util/common/granularity/AllGranularity.java +++ b/java-util/src/main/java/io/druid/java/util/common/granularity/AllGranularity.java @@ -27,7 +27,7 @@ import org.joda.time.format.DateTimeFormatter; 
/** * AllGranularity buckets everything into a single bucket */ -public final class AllGranularity extends Granularity +public class AllGranularity extends Granularity { // These constants are from JodaUtils in druid-common. // Creates circular dependency. @@ -38,14 +38,11 @@ public final class AllGranularity extends Granularity private final DateTime maxDateTime = new DateTime(MAX_INSTANT); private final DateTime minDateTime = new DateTime(MIN_INSTANT); - private static final AllGranularity INSTANCE = new AllGranularity(); - - private AllGranularity() {} - - public static AllGranularity getInstance() - { - return INSTANCE; - } + /** + * This constructor is public b/c it is serialized and deserialized + * based on type in GranularityModule + */ + public AllGranularity() {} @Override public DateTimeFormatter getFormatter(Formatter type) diff --git a/java-util/src/main/java/io/druid/java/util/common/granularity/Granularities.java b/java-util/src/main/java/io/druid/java/util/common/granularity/Granularities.java new file mode 100644 index 00000000000..599110550f9 --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/granularity/Granularities.java @@ -0,0 +1,44 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.java.util.common.granularity; + +/** + * This class was created b/c sometimes static initializers of a class that use a subclass can deadlock.
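+ * For example: Granularity's static constants were initialized from GranularityType, whose enum constants construct Granularity subclasses, so a thread initializing Granularity and a thread initializing a subclass could each end up waiting on the other's class-initialization lock.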
+ * See: #2979, #3979 + */ +public class Granularities +{ + public static final Granularity SECOND = GranularityType.SECOND.getDefaultGranularity(); + public static final Granularity MINUTE = GranularityType.MINUTE.getDefaultGranularity(); + public static final Granularity FIVE_MINUTE = GranularityType.FIVE_MINUTE.getDefaultGranularity(); + public static final Granularity TEN_MINUTE = GranularityType.TEN_MINUTE.getDefaultGranularity(); + public static final Granularity FIFTEEN_MINUTE = GranularityType.FIFTEEN_MINUTE.getDefaultGranularity(); + public static final Granularity THIRTY_MINUTE = GranularityType.THIRTY_MINUTE.getDefaultGranularity(); + public static final Granularity HOUR = GranularityType.HOUR.getDefaultGranularity(); + public static final Granularity SIX_HOUR = GranularityType.SIX_HOUR.getDefaultGranularity(); + public static final Granularity DAY = GranularityType.DAY.getDefaultGranularity(); + public static final Granularity WEEK = GranularityType.WEEK.getDefaultGranularity(); + public static final Granularity MONTH = GranularityType.MONTH.getDefaultGranularity(); + public static final Granularity QUARTER = GranularityType.QUARTER.getDefaultGranularity(); + public static final Granularity YEAR = GranularityType.YEAR.getDefaultGranularity(); + public static final Granularity ALL = GranularityType.ALL.getDefaultGranularity(); + public static final Granularity NONE = GranularityType.NONE.getDefaultGranularity(); + +} diff --git a/java-util/src/main/java/io/druid/java/util/common/granularity/Granularity.java b/java-util/src/main/java/io/druid/java/util/common/granularity/Granularity.java index 73e032e0748..acd21e8ed23 100644 --- a/java-util/src/main/java/io/druid/java/util/common/granularity/Granularity.java +++ b/java-util/src/main/java/io/druid/java/util/common/granularity/Granularity.java @@ -20,7 +20,6 @@ package io.druid.java.util.common.granularity; import com.fasterxml.jackson.annotation.JsonCreator; -import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; import com.google.common.primitives.Longs; import io.druid.java.util.common.Cacheable; @@ -28,7 +27,6 @@ import io.druid.java.util.common.IAE; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import org.joda.time.Interval; -import org.joda.time.Period; import org.joda.time.format.DateTimeFormatter; import java.util.Collections; @@ -42,32 +40,6 @@ import java.util.regex.Pattern; public abstract class Granularity implements Cacheable { - - public static final Granularity SECOND = GranularityType.SECOND.defaultGranularity; - public static final Granularity MINUTE = GranularityType.MINUTE.defaultGranularity; - public static final Granularity FIVE_MINUTE = GranularityType.FIVE_MINUTE.defaultGranularity; - public static final Granularity TEN_MINUTE = GranularityType.TEN_MINUTE.defaultGranularity; - public static final Granularity FIFTEEN_MINUTE = GranularityType.FIFTEEN_MINUTE.defaultGranularity; - public static final Granularity THIRTY_MINUTE = GranularityType.THIRTY_MINUTE.defaultGranularity; - public static final Granularity HOUR = GranularityType.HOUR.defaultGranularity; - public static final Granularity SIX_HOUR = GranularityType.SIX_HOUR.defaultGranularity; - public static final Granularity DAY = GranularityType.DAY.defaultGranularity; - public static final Granularity WEEK = GranularityType.WEEK.defaultGranularity; - public static final Granularity MONTH = GranularityType.MONTH.defaultGranularity; - public static final Granularity QUARTER = 
GranularityType.QUARTER.defaultGranularity; - public static final Granularity YEAR = GranularityType.YEAR.defaultGranularity; - public static final Granularity ALL = GranularityType.ALL.defaultGranularity; - public static final Granularity NONE = GranularityType.NONE.defaultGranularity; - - /** - * For a select subset of granularites, users can specify them directly as string. - * These are "predefined granularities". - * For all others, the users will have to use "Duration" or "Period" type granularities - */ - static final List PREDEFINED_GRANULARITIES = ImmutableList.of( - SECOND, MINUTE, FIVE_MINUTE, TEN_MINUTE, FIFTEEN_MINUTE, THIRTY_MINUTE, - HOUR, SIX_HOUR, DAY, WEEK, MONTH, QUARTER, YEAR); - /** * Default patterns for parsing paths. */ @@ -81,7 +53,7 @@ public abstract class Granularity implements Cacheable @JsonCreator public static Granularity fromString(String str) { - return GranularityType.valueOf(str.toUpperCase()).defaultGranularity; + return GranularityType.valueOf(str.toUpperCase()).getDefaultGranularity(); } /** @@ -219,222 +191,6 @@ public abstract class Granularity implements Cacheable LOWER_DEFAULT } - /** - * Only to create a mapping of the granularity and all the supported file patterns - * namely: default, lowerDefault and hive. - */ - public enum GranularityType - { - SECOND( - "'dt'=yyyy-MM-dd-HH-mm-ss", - "'y'=yyyy/'m'=MM/'d'=dd/'h'=HH/'m'=mm/'s'=ss", - "'y'=yyyy/'m'=MM/'d'=dd/'H'=HH/'M'=mm/'S'=ss", - 6, - "PT1S" - ), - MINUTE( - "'dt'=yyyy-MM-dd-HH-mm", - "'y'=yyyy/'m'=MM/'d'=dd/'h'=HH/'m'=mm", - "'y'=yyyy/'m'=MM/'d'=dd/'H'=HH/'M'=mm", - 5, - "PT1M" - ), - FIVE_MINUTE(MINUTE, "PT5M"), - TEN_MINUTE(MINUTE, "PT10M"), - FIFTEEN_MINUTE(MINUTE, "PT15M"), - THIRTY_MINUTE(MINUTE, "PT30M"), - HOUR( - "'dt'=yyyy-MM-dd-HH", - "'y'=yyyy/'m'=MM/'d'=dd/'h'=HH", - "'y'=yyyy/'m'=MM/'d'=dd/'H'=HH", - 4, - "PT1H" - ), - SIX_HOUR(HOUR, "PT6H"), - DAY( - "'dt'=yyyy-MM-dd", - "'y'=yyyy/'m'=MM/'d'=dd", - "'y'=yyyy/'m'=MM/'d'=dd", - 3, - "P1D" - ), - WEEK(DAY, "P1W"), - MONTH( - "'dt'=yyyy-MM", - "'y'=yyyy/'m'=MM", - "'y'=yyyy/'m'=MM", - 2, - "P1M" - ), - QUARTER(MONTH, "P3M"), - YEAR( - "'dt'=yyyy", - "'y'=yyyy", - "'y'=yyyy", - 1, - "P1Y" - ), - ALL(AllGranularity.getInstance()), - NONE(NoneGranularity.getInstance()); - - private final String hiveFormat; - private final String lowerDefaultFormat; - private final String defaultFormat; - private final int dateValuePositions; - private final Period period; - private final Granularity defaultGranularity; - - GranularityType(Granularity specialGranularity) - { - this.hiveFormat = null; - this.lowerDefaultFormat = null; - this.defaultFormat = null; - this.dateValuePositions = 0; - this.period = null; - this.defaultGranularity = specialGranularity; - } - - GranularityType( - final String hiveFormat, - final String lowerDefaultFormat, - final String defaultFormat, - final int dateValuePositions, - final String period - ) - { - this.hiveFormat = hiveFormat; - this.lowerDefaultFormat = lowerDefaultFormat; - this.defaultFormat = defaultFormat; - this.dateValuePositions = dateValuePositions; - this.period = new Period(period); - this.defaultGranularity = new PeriodGranularity(this.period, null, null); - } - - GranularityType(GranularityType granularityType, String period) - { - this( - granularityType.getHiveFormat(), - granularityType.getLowerDefaultFormat(), - granularityType.getDefaultFormat(), - granularityType.dateValuePositions, - period - ); - } - - Granularity create(DateTime origin, DateTimeZone tz) - { - if (period != null && (origin 
!= null || tz != null)) { - return new PeriodGranularity(period, origin, tz); - } else { - // If All or None granularity, or if origin and tz are both null, return the cached granularity - return defaultGranularity; - } - } - - public static DateTime getDateTime(GranularityType granularityType, Integer[] vals) - { - if (granularityType.dateValuePositions == 0) { - // All or None granularity - return null; - } - for (int i = 1; i <= granularityType.dateValuePositions; i++) { - if (vals[i] == null) { - return null; - } - } - return new DateTime( - vals[1], - granularityType.dateValuePositions >= 2 ? vals[2] : 1, - granularityType.dateValuePositions >= 3 ? vals[3] : 1, - granularityType.dateValuePositions >= 4 ? vals[4] : 0, - granularityType.dateValuePositions >= 5 ? vals[5] : 0, - granularityType.dateValuePositions >= 6 ? vals[6] : 0, - 0 - ); - } - - /** - * Note: This is only an estimate based on the values in period. - * This will not work for complicated periods that represent say 1 year 1 day - */ - public static GranularityType fromPeriod(Period period) - { - int[] vals = period.getValues(); - int index = -1; - for (int i = 0; i < vals.length; i++) { - if (vals[i] != 0) { - if (index < 0) { - index = i; - } else { - throw new IAE("Granularity is not supported. [%s]", period); - } - } - } - - switch (index) { - case 0: - return GranularityType.YEAR; - case 1: - if (vals[index] == 4) { - return GranularityType.QUARTER; - } - else if (vals[index] == 1) { - return GranularityType.MONTH; - } - break; - case 2: - return GranularityType.WEEK; - case 3: - return GranularityType.DAY; - case 4: - if (vals[index] == 6) { - return GranularityType.SIX_HOUR; - } - else if (vals[index] == 1) { - return GranularityType.HOUR; - } - break; - case 5: - if (vals[index] == 30) { - return GranularityType.THIRTY_MINUTE; - } - else if (vals[index] == 15) { - return GranularityType.FIFTEEN_MINUTE; - } - else if (vals[index] == 10) { - return GranularityType.TEN_MINUTE; - } - else if (vals[index] == 5) { - return GranularityType.FIVE_MINUTE; - } - else if (vals[index] == 1) { - return GranularityType.MINUTE; - } - break; - case 6: - return GranularityType.SECOND; - default: - break; - } - throw new IAE("Granularity is not supported. [%s]", period); - } - - public String getHiveFormat() - { - return hiveFormat; - } - - public String getLowerDefaultFormat() - { - return lowerDefaultFormat; - } - - public String getDefaultFormat() - { - return defaultFormat; - } - } - private class IntervalIterable implements Iterable { private final Interval inputInterval; diff --git a/java-util/src/main/java/io/druid/java/util/common/granularity/GranularityType.java b/java-util/src/main/java/io/druid/java/util/common/granularity/GranularityType.java new file mode 100644 index 00000000000..e39bc404405 --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/granularity/GranularityType.java @@ -0,0 +1,262 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.java.util.common.granularity; + +import io.druid.java.util.common.IAE; +import org.joda.time.DateTime; +import org.joda.time.DateTimeZone; +import org.joda.time.Period; + +/** + * Only to create a mapping of the granularity and all the supported file patterns + * namely: default, lowerDefault and hive. + */ +public enum GranularityType +{ + SECOND( + "'dt'=yyyy-MM-dd-HH-mm-ss", + "'y'=yyyy/'m'=MM/'d'=dd/'h'=HH/'m'=mm/'s'=ss", + "'y'=yyyy/'m'=MM/'d'=dd/'H'=HH/'M'=mm/'S'=ss", + 6, + "PT1S" + ), + MINUTE( + "'dt'=yyyy-MM-dd-HH-mm", + "'y'=yyyy/'m'=MM/'d'=dd/'h'=HH/'m'=mm", + "'y'=yyyy/'m'=MM/'d'=dd/'H'=HH/'M'=mm", + 5, + "PT1M" + ), + FIVE_MINUTE(MINUTE, "PT5M"), + TEN_MINUTE(MINUTE, "PT10M"), + FIFTEEN_MINUTE(MINUTE, "PT15M"), + THIRTY_MINUTE(MINUTE, "PT30M"), + HOUR( + "'dt'=yyyy-MM-dd-HH", + "'y'=yyyy/'m'=MM/'d'=dd/'h'=HH", + "'y'=yyyy/'m'=MM/'d'=dd/'H'=HH", + 4, + "PT1H" + ), + SIX_HOUR(HOUR, "PT6H"), + DAY( + "'dt'=yyyy-MM-dd", + "'y'=yyyy/'m'=MM/'d'=dd", + "'y'=yyyy/'m'=MM/'d'=dd", + 3, + "P1D" + ), + WEEK(DAY, "P1W"), + MONTH( + "'dt'=yyyy-MM", + "'y'=yyyy/'m'=MM", + "'y'=yyyy/'m'=MM", + 2, + "P1M" + ), + QUARTER(MONTH, "P3M"), + YEAR( + "'dt'=yyyy", + "'y'=yyyy", + "'y'=yyyy", + 1, + "P1Y" + ), + ALL(new AllGranularity()), + NONE(new NoneGranularity()); + + private final String hiveFormat; + private final String lowerDefaultFormat; + private final String defaultFormat; + private final int dateValuePositions; + private final Period period; + private final Granularity defaultGranularity; + + GranularityType(Granularity specialGranularity) + { + this.hiveFormat = null; + this.lowerDefaultFormat = null; + this.defaultFormat = null; + this.dateValuePositions = 0; + this.period = null; + this.defaultGranularity = specialGranularity; + } + + GranularityType( + final String hiveFormat, + final String lowerDefaultFormat, + final String defaultFormat, + final int dateValuePositions, + final String period + ) + { + this.hiveFormat = hiveFormat; + this.lowerDefaultFormat = lowerDefaultFormat; + this.defaultFormat = defaultFormat; + this.dateValuePositions = dateValuePositions; + this.period = new Period(period); + this.defaultGranularity = new PeriodGranularity(this.period, null, null); + } + + GranularityType(GranularityType granularityType, String period) + { + this( + granularityType.getHiveFormat(), + granularityType.getLowerDefaultFormat(), + granularityType.getDefaultFormat(), + granularityType.dateValuePositions, + period + ); + } + + Granularity create(DateTime origin, DateTimeZone tz) + { + if (period != null && (origin != null || tz != null)) { + return new PeriodGranularity(period, origin, tz); + } else { + // If All or None granularity, or if origin and tz are both null, return the cached granularity + return defaultGranularity; + } + } + + public Granularity getDefaultGranularity() + { + return defaultGranularity; + } + + public DateTime getDateTime(Integer[] vals) + { + if (dateValuePositions == 0) { + // All or None granularity + return null; + } + for (int i = 1; i <= dateValuePositions; i++) { + if (vals[i] == null) { + return null; + } + 
} return new DateTime( + vals[1], + dateValuePositions >= 2 ? vals[2] : 1, + dateValuePositions >= 3 ? vals[3] : 1, + dateValuePositions >= 4 ? vals[4] : 0, + dateValuePositions >= 5 ? vals[5] : 0, + dateValuePositions >= 6 ? vals[6] : 0, + 0 + ); + } + + /** + * For a select subset of granularities, users can specify them directly as a string. + * These are "predefined granularities" or "standard" granularities. + * For all others, users will have to use "Duration" or "Period" type granularities. + */ + public static boolean isStandard(Granularity granularity) + { + final GranularityType[] values = GranularityType.values(); + for (GranularityType value : values) { + if (value.getDefaultGranularity().equals(granularity)) { + return true; + } + } + return false; + } + + /** + * Note: This is only an estimate based on the values in period. + * This will not work for complicated periods that represent, say, 1 year and 1 day. + */ + public static GranularityType fromPeriod(Period period) + { + int[] vals = period.getValues(); + int index = -1; + for (int i = 0; i < vals.length; i++) { + if (vals[i] != 0) { + if (index < 0) { + index = i; + } else { + throw new IAE("Granularity is not supported. [%s]", period); + } + } + } + + switch (index) { + case 0: + return GranularityType.YEAR; + case 1: + if (vals[index] == 4) { + return GranularityType.QUARTER; + } + else if (vals[index] == 1) { + return GranularityType.MONTH; + } + break; + case 2: + return GranularityType.WEEK; + case 3: + return GranularityType.DAY; + case 4: + if (vals[index] == 6) { + return GranularityType.SIX_HOUR; + } + else if (vals[index] == 1) { + return GranularityType.HOUR; + } + break; + case 5: + if (vals[index] == 30) { + return GranularityType.THIRTY_MINUTE; + } + else if (vals[index] == 15) { + return GranularityType.FIFTEEN_MINUTE; + } + else if (vals[index] == 10) { + return GranularityType.TEN_MINUTE; + } + else if (vals[index] == 5) { + return GranularityType.FIVE_MINUTE; + } + else if (vals[index] == 1) { + return GranularityType.MINUTE; + } + break; + case 6: + return GranularityType.SECOND; + default: + break; + } + throw new IAE("Granularity is not supported.
[%s]", period); + } + + public String getHiveFormat() + { + return hiveFormat; + } + + public String getLowerDefaultFormat() + { + return lowerDefaultFormat; + } + + public String getDefaultFormat() + { + return defaultFormat; + } +} diff --git a/java-util/src/main/java/io/druid/java/util/common/granularity/NoneGranularity.java b/java-util/src/main/java/io/druid/java/util/common/granularity/NoneGranularity.java index 0b53e3f5800..1fff1a42bab 100644 --- a/java-util/src/main/java/io/druid/java/util/common/granularity/NoneGranularity.java +++ b/java-util/src/main/java/io/druid/java/util/common/granularity/NoneGranularity.java @@ -25,16 +25,13 @@ import org.joda.time.format.DateTimeFormatter; /** * NoneGranularity does not bucket data */ -public final class NoneGranularity extends Granularity +public class NoneGranularity extends Granularity { - private static final NoneGranularity INSTANCE = new NoneGranularity(); - - private NoneGranularity() {} - - public static NoneGranularity getInstance() - { - return INSTANCE; - } + /** + * This constructor is public b/c it is serialized and deserialized + * based on type in GranularityModule + */ + public NoneGranularity() {} @Override public DateTimeFormatter getFormatter(Formatter type) diff --git a/java-util/src/main/java/io/druid/java/util/common/granularity/PeriodGranularity.java b/java-util/src/main/java/io/druid/java/util/common/granularity/PeriodGranularity.java index 456fef01cac..2e86a46969c 100644 --- a/java-util/src/main/java/io/druid/java/util/common/granularity/PeriodGranularity.java +++ b/java-util/src/main/java/io/druid/java/util/common/granularity/PeriodGranularity.java @@ -131,7 +131,7 @@ public class PeriodGranularity extends Granularity implements JsonSerializable Integer[] vals = getDateValues(filePath, formatter); GranularityType granularityType = GranularityType.fromPeriod(period); - DateTime date = GranularityType.getDateTime(granularityType, vals); + DateTime date = granularityType.getDateTime(vals); if (date != null) { return bucketStart(date); @@ -437,9 +437,9 @@ public class PeriodGranularity extends Granularity implements JsonSerializable JsonGenerator jsonGenerator, SerializerProvider serializerProvider ) throws IOException, JsonProcessingException { - // Retain the same behavior as pre-refactor granularity code. + // Retain the same behavior as before #3850. // i.e. when Granularity class was an enum. 
- if (PREDEFINED_GRANULARITIES.contains(this)) { + if (GranularityType.isStandard(this)) { jsonGenerator.writeString(GranularityType.fromPeriod(getPeriod()).toString()); } else { jsonGenerator.writeStartObject(); diff --git a/java-util/src/test/java/io/druid/java/util/common/GranularityTest.java b/java-util/src/test/java/io/druid/java/util/common/GranularityTest.java index d509fdaedf2..4a9b65ca579 100644 --- a/java-util/src/test/java/io/druid/java/util/common/GranularityTest.java +++ b/java-util/src/test/java/io/druid/java/util/common/GranularityTest.java @@ -19,7 +19,9 @@ package io.druid.java.util.common; +import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.granularity.Granularity; +import io.druid.java.util.common.granularity.GranularityType; import io.druid.java.util.common.granularity.PeriodGranularity; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; @@ -34,15 +36,15 @@ import java.util.NoSuchElementException; public class GranularityTest { - final Granularity SECOND = Granularity.SECOND; - final Granularity MINUTE = Granularity.MINUTE; - final Granularity HOUR = Granularity.HOUR; - final Granularity SIX_HOUR = Granularity.SIX_HOUR; - final Granularity FIFTEEN_MINUTE = Granularity.FIFTEEN_MINUTE; - final Granularity DAY = Granularity.DAY; - final Granularity WEEK = Granularity.WEEK; - final Granularity MONTH = Granularity.MONTH; - final Granularity YEAR = Granularity.YEAR; + final Granularity SECOND = Granularities.SECOND; + final Granularity MINUTE = Granularities.MINUTE; + final Granularity HOUR = Granularities.HOUR; + final Granularity SIX_HOUR = Granularities.SIX_HOUR; + final Granularity FIFTEEN_MINUTE = Granularities.FIFTEEN_MINUTE; + final Granularity DAY = Granularities.DAY; + final Granularity WEEK = Granularities.WEEK; + final Granularity MONTH = Granularities.MONTH; + final Granularity YEAR = Granularities.YEAR; @Test public void testHiveFormat() { @@ -412,7 +414,7 @@ public class GranularityTest { { try { Period p = Period.years(6).withMonths(3).withSeconds(23); - Granularity.GranularityType.fromPeriod(p); + GranularityType.fromPeriod(p); Assert.fail("Complicated period creation should fail b/c of unsupported granularity type."); } catch (IAE e) { diff --git a/processing/src/main/java/io/druid/query/Druids.java b/processing/src/main/java/io/druid/query/Druids.java index ccfd3d3e62d..a835394052f 100644 --- a/processing/src/main/java/io/druid/query/Druids.java +++ b/processing/src/main/java/io/druid/query/Druids.java @@ -24,6 +24,7 @@ import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; +import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.granularity.Granularity; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.PostAggregator; @@ -345,7 +346,7 @@ public class Druids descending = false; virtualColumns = null; dimFilter = null; - granularity = Granularity.ALL; + granularity = Granularities.ALL; aggregatorSpecs = Lists.newArrayList(); postAggregatorSpecs = Lists.newArrayList(); context = null; @@ -573,7 +574,7 @@ public class Druids { dataSource = null; dimFilter = null; - granularity = Granularity.ALL; + granularity = Granularities.ALL; limit = 0; querySegmentSpec = null; dimensions = null; @@ -1132,7 +1133,7 @@ public class Druids querySegmentSpec = null; context = null; dimFilter = null; - granularity = 
Granularity.ALL; + granularity = Granularities.ALL; dimensions = Lists.newArrayList(); metrics = Lists.newArrayList(); pagingSpec = null; diff --git a/processing/src/main/java/io/druid/query/extraction/TimeFormatExtractionFn.java b/processing/src/main/java/io/druid/query/extraction/TimeFormatExtractionFn.java index 6f60d6ffe5d..16c18126db1 100644 --- a/processing/src/main/java/io/druid/query/extraction/TimeFormatExtractionFn.java +++ b/processing/src/main/java/io/druid/query/extraction/TimeFormatExtractionFn.java @@ -23,6 +23,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Preconditions; import io.druid.common.guava.GuavaUtils; import io.druid.java.util.common.StringUtils; +import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.granularity.Granularity; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; @@ -53,7 +54,7 @@ public class TimeFormatExtractionFn implements ExtractionFn this.format = format; this.tz = tz; this.locale = localeString == null ? null : Locale.forLanguageTag(localeString); - this.granularity = granularity == null ? Granularity.NONE : granularity; + this.granularity = granularity == null ? Granularities.NONE : granularity; if (asMillis && format == null) { Preconditions.checkArgument(tz == null, "timeZone requires a format"); diff --git a/processing/src/main/java/io/druid/query/groupby/GroupByQuery.java b/processing/src/main/java/io/druid/query/groupby/GroupByQuery.java index 98a49f1cc37..9184ad38689 100644 --- a/processing/src/main/java/io/druid/query/groupby/GroupByQuery.java +++ b/processing/src/main/java/io/druid/query/groupby/GroupByQuery.java @@ -34,6 +34,7 @@ import com.google.common.primitives.Longs; import io.druid.data.input.Row; import io.druid.java.util.common.IAE; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.granularity.Granularity; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; @@ -354,7 +355,7 @@ public class GroupByQuery extends BaseQuery private Comparator getTimeComparator(boolean granular) { - if (Granularity.ALL.equals(granularity)) { + if (Granularities.ALL.equals(granularity)) { return null; } else if (granular) { return new Comparator() diff --git a/processing/src/main/java/io/druid/query/groupby/GroupByQueryHelper.java b/processing/src/main/java/io/druid/query/groupby/GroupByQueryHelper.java index 2f87c7ca0b4..64f67e14a4c 100644 --- a/processing/src/main/java/io/druid/query/groupby/GroupByQueryHelper.java +++ b/processing/src/main/java/io/druid/query/groupby/GroupByQueryHelper.java @@ -32,6 +32,7 @@ import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.StringDimensionSchema; import io.druid.java.util.common.ISE; import io.druid.java.util.common.Pair; +import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.granularity.Granularity; import io.druid.java.util.common.guava.Accumulator; import io.druid.java.util.common.guava.Sequence; @@ -69,7 +70,7 @@ public class GroupByQueryHelper final long timeStart = query.getIntervals().get(0).getStartMillis(); long granTimeStart = timeStart; - if (!(Granularity.ALL.equals(gran))) { + if (!(Granularities.ALL.equals(gran))) { granTimeStart = gran.bucketStart(new DateTime(timeStart)).getMillis(); } diff --git a/processing/src/main/java/io/druid/query/groupby/strategy/GroupByStrategyV2.java 
b/processing/src/main/java/io/druid/query/groupby/strategy/GroupByStrategyV2.java index 5f335c85976..100955ebdc0 100644 --- a/processing/src/main/java/io/druid/query/groupby/strategy/GroupByStrategyV2.java +++ b/processing/src/main/java/io/druid/query/groupby/strategy/GroupByStrategyV2.java @@ -37,6 +37,7 @@ import io.druid.data.input.Row; import io.druid.guice.annotations.Global; import io.druid.guice.annotations.Merging; import io.druid.guice.annotations.Smile; +import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.granularity.Granularity; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; @@ -116,7 +117,7 @@ public class GroupByStrategyV2 implements GroupByStrategy if (!timestampStringFromContext.isEmpty()) { return new DateTime(Long.parseLong(timestampStringFromContext)); - } else if (Granularity.ALL.equals(gran)) { + } else if (Granularities.ALL.equals(gran)) { final long timeStart = query.getIntervals().get(0).getStartMillis(); return gran.getIterable(new Interval(timeStart, timeStart + 1)).iterator().next().getStart(); } else { diff --git a/processing/src/main/java/io/druid/query/metadata/SegmentAnalyzer.java b/processing/src/main/java/io/druid/query/metadata/SegmentAnalyzer.java index c7d56a4005b..ec2e3f83cf8 100644 --- a/processing/src/main/java/io/druid/query/metadata/SegmentAnalyzer.java +++ b/processing/src/main/java/io/druid/query/metadata/SegmentAnalyzer.java @@ -27,7 +27,7 @@ import com.google.common.collect.Maps; import com.google.common.collect.Sets; import com.google.common.primitives.Longs; import io.druid.common.utils.StringUtils; -import io.druid.java.util.common.granularity.Granularity; +import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Accumulator; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.logger.Logger; @@ -253,7 +253,7 @@ public class SegmentAnalyzer null, new Interval(start, end), VirtualColumns.EMPTY, - Granularity.ALL, + Granularities.ALL, false ); diff --git a/processing/src/main/java/io/druid/query/search/search/SearchQuery.java b/processing/src/main/java/io/druid/query/search/search/SearchQuery.java index d6e42355508..c45a21cddb0 100644 --- a/processing/src/main/java/io/druid/query/search/search/SearchQuery.java +++ b/processing/src/main/java/io/druid/query/search/search/SearchQuery.java @@ -22,6 +22,7 @@ package io.druid.query.search.search; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Preconditions; +import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.granularity.Granularity; import io.druid.query.BaseQuery; import io.druid.query.DataSource; @@ -67,7 +68,7 @@ public class SearchQuery extends BaseQuery> this.dimFilter = dimFilter; this.sortSpec = sortSpec == null ? DEFAULT_SORT_SPEC : sortSpec; - this.granularity = granularity == null ? Granularity.ALL : granularity; + this.granularity = granularity == null ? Granularities.ALL : granularity; this.limit = (limit == 0) ? 1000 : limit; this.dimensions = dimensions; this.querySpec = querySpec == null ? 
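
// The GroupByStrategyV2 hunk above: under ALL granularity every output row is
// stamped with the start of the single bucket covering the first interval.
// The patched branch, condensed into a standalone helper (name is hypothetical;
// DateTime/Interval are org.joda.time, as elsewhere in these files):
static DateTime universalTimestamp(GroupByQuery query)
{
  final long timeStart = query.getIntervals().get(0).getStartMillis();
  return Granularities.ALL
      .getIterable(new Interval(timeStart, timeStart + 1))
      .iterator().next().getStart();
}
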
new AllSearchQuerySpec() : querySpec; diff --git a/processing/src/main/java/io/druid/query/timeboundary/TimeBoundaryQueryRunnerFactory.java b/processing/src/main/java/io/druid/query/timeboundary/TimeBoundaryQueryRunnerFactory.java index 0cda892163e..e3b383cc78e 100644 --- a/processing/src/main/java/io/druid/query/timeboundary/TimeBoundaryQueryRunnerFactory.java +++ b/processing/src/main/java/io/druid/query/timeboundary/TimeBoundaryQueryRunnerFactory.java @@ -22,8 +22,8 @@ package io.druid.query.timeboundary; import com.google.common.base.Function; import com.google.common.collect.Lists; import com.google.inject.Inject; -import io.druid.java.util.common.granularity.AllGranularity; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.BaseSequence; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; @@ -114,7 +114,7 @@ public class TimeBoundaryQueryRunnerFactory legacyQuery.getQuerySegmentSpec().getIntervals(), Filters.toFilter(legacyQuery.getDimensionsFilter()), VirtualColumns.EMPTY, descending, - AllGranularity.getInstance(), + Granularities.ALL, this.skipToFirstMatching ); final List> resultList = Sequences.toList( diff --git a/processing/src/main/java/io/druid/query/topn/TopNQueryBuilder.java b/processing/src/main/java/io/druid/query/topn/TopNQueryBuilder.java index f70ce05a448..ae7100af27d 100644 --- a/processing/src/main/java/io/druid/query/topn/TopNQueryBuilder.java +++ b/processing/src/main/java/io/druid/query/topn/TopNQueryBuilder.java @@ -20,6 +20,7 @@ package io.druid.query.topn; import com.google.common.collect.Lists; +import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.granularity.Granularity; import io.druid.query.DataSource; import io.druid.query.TableDataSource; @@ -84,7 +85,7 @@ public class TopNQueryBuilder threshold = 0; querySegmentSpec = null; dimFilter = null; - granularity = Granularity.ALL; + granularity = Granularities.ALL; aggregatorSpecs = Lists.newArrayList(); postAggregatorSpecs = Lists.newArrayList(); context = null; diff --git a/processing/src/main/java/io/druid/segment/incremental/IncrementalIndexSchema.java b/processing/src/main/java/io/druid/segment/incremental/IncrementalIndexSchema.java index e900aa65330..d9e13851178 100644 --- a/processing/src/main/java/io/druid/segment/incremental/IncrementalIndexSchema.java +++ b/processing/src/main/java/io/druid/segment/incremental/IncrementalIndexSchema.java @@ -22,6 +22,7 @@ package io.druid.segment.incremental; import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.InputRowParser; import io.druid.data.input.impl.TimestampSpec; +import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.granularity.Granularity; import io.druid.query.aggregation.AggregatorFactory; import io.druid.segment.VirtualColumns; @@ -106,7 +107,7 @@ public class IncrementalIndexSchema public Builder() { this.minTimestamp = 0L; - this.gran = Granularity.NONE; + this.gran = Granularities.NONE; this.virtualColumns = VirtualColumns.EMPTY; this.dimensionsSpec = new DimensionsSpec(null, null, null); this.metrics = new AggregatorFactory[]{}; diff --git a/processing/src/test/java/io/druid/granularity/QueryGranularityTest.java b/processing/src/test/java/io/druid/granularity/QueryGranularityTest.java index 459bf3236c5..546f8789cc9 100644 --- a/processing/src/test/java/io/druid/granularity/QueryGranularityTest.java +++ 
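
// The TimeBoundary change above is the commit's "No more singleton" item:
// AllGranularity.getInstance() is gone and call sites share one constant.
// Deserialization is assumed to funnel "all" back to that same shared
// instance, which keeps identity-style checks working; a probe of that
// assumption, in the style of the serde tests further down:
Granularity fromJson = new DefaultObjectMapper().readValue("\"all\"", Granularity.class);
Assert.assertSame(Granularities.ALL, fromJson);  // holds only if serde returns the shared constant
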
b/processing/src/test/java/io/druid/granularity/QueryGranularityTest.java @@ -26,6 +26,7 @@ import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import io.druid.jackson.DefaultObjectMapper; import io.druid.java.util.common.granularity.DurationGranularity; +import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.granularity.Granularity; import io.druid.java.util.common.granularity.PeriodGranularity; import org.joda.time.DateTime; @@ -53,7 +54,7 @@ public class QueryGranularityTest @Test public void testIterableNone() throws Exception { - final Iterator iterator = Granularity.NONE.getIterable(new Interval(0, 1000)).iterator(); + final Iterator iterator = Granularities.NONE.getIterable(new Interval(0, 1000)).iterator(); int count = 0; while (iterator.hasNext()) { Assert.assertEquals(count, iterator.next().getStartMillis()); @@ -72,7 +73,7 @@ public class QueryGranularityTest new DateTime("2011-01-01T09:39:00.000Z"), new DateTime("2011-01-01T09:40:00.000Z") ), - Granularity.MINUTE.getIterable(new Interval(baseTime.getMillis(), baseTime.plus(Minutes.THREE).getMillis())) + Granularities.MINUTE.getIterable(new Interval(baseTime.getMillis(), baseTime.plus(Minutes.THREE).getMillis())) ); } @@ -88,7 +89,7 @@ public class QueryGranularityTest new DateTime("2011-01-01T09:40:00.000Z"), new DateTime("2011-01-01T09:41:00.000Z") ), - Granularity.MINUTE.getIterable(new Interval(baseTime.getMillis(), baseTime.plus(Minutes.THREE).getMillis())) + Granularities.MINUTE.getIterable(new Interval(baseTime.getMillis(), baseTime.plus(Minutes.THREE).getMillis())) ); } @@ -103,7 +104,7 @@ public class QueryGranularityTest new DateTime("2011-01-01T09:45:00.000Z"), new DateTime("2011-01-01T10:00:00.000Z") ), - Granularity.FIFTEEN_MINUTE.getIterable( + Granularities.FIFTEEN_MINUTE.getIterable( new Interval( baseTime.getMillis(), baseTime.plus(Minutes.minutes(45)).getMillis() )) @@ -122,7 +123,7 @@ public class QueryGranularityTest new DateTime("2011-01-01T10:00:00.000Z"), new DateTime("2011-01-01T10:15:00.000Z") ), - Granularity.FIFTEEN_MINUTE.getIterable(new Interval(baseTime.getMillis(), baseTime.plus(Minutes.minutes(45)).getMillis())) + Granularities.FIFTEEN_MINUTE.getIterable(new Interval(baseTime.getMillis(), baseTime.plus(Minutes.minutes(45)).getMillis())) ); } @@ -136,7 +137,7 @@ public class QueryGranularityTest new DateTime("2011-01-01T09:00:00.000Z"), new DateTime("2011-01-01T10:00:00.000Z"), new DateTime("2011-01-01T11:00:00.000Z") - ), Granularity.HOUR.getIterable(new Interval(baseTime.getMillis(), baseTime.plus(Hours.hours(3)).getMillis())) + ), Granularities.HOUR.getIterable(new Interval(baseTime.getMillis(), baseTime.plus(Hours.hours(3)).getMillis())) ); } @@ -151,7 +152,7 @@ public class QueryGranularityTest new DateTime("2011-01-01T10:00:00.000Z"), new DateTime("2011-01-01T11:00:00.000Z"), new DateTime("2011-01-01T12:00:00.000Z") - ), Granularity.HOUR.getIterable(new Interval(baseTime.getMillis(), baseTime.plus(Hours.hours(3)).getMillis())) + ), Granularities.HOUR.getIterable(new Interval(baseTime.getMillis(), baseTime.plus(Hours.hours(3)).getMillis())) ); } @@ -166,7 +167,7 @@ public class QueryGranularityTest new DateTime("2011-01-02T00:00:00.000Z"), new DateTime("2011-01-03T00:00:00.000Z") ), - Granularity.DAY.getIterable(new Interval(baseTime.getMillis(), baseTime.plus(Days.days(3)).getMillis())) + Granularities.DAY.getIterable(new Interval(baseTime.getMillis(), baseTime.plus(Days.days(3)).getMillis())) ); } @@ -182,7 +183,7 @@ 
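
// What the iterable assertions in this test exercise: getIterable(interval)
// yields one org.joda.time.Interval per bucket, starting at the floor of the
// interval's start. For the aligned MINUTE case above:
DateTime base = new DateTime("2011-01-01T09:38:00.000Z");
for (Interval bucket : Granularities.MINUTE.getIterable(new Interval(base, base.plusMinutes(3)))) {
  System.out.println(bucket.getStart());  // 09:38, 09:39, 09:40
}
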
public class QueryGranularityTest new DateTime("2011-01-03T00:00:00.000Z"), new DateTime("2011-01-04T00:00:00.000Z") ), - Granularity.DAY.getIterable(new Interval(baseTime.getMillis(), baseTime.plus(Days.days(3)).getMillis())) + Granularities.DAY.getIterable(new Interval(baseTime.getMillis(), baseTime.plus(Days.days(3)).getMillis())) ); } @@ -197,7 +198,7 @@ public class QueryGranularityTest new DateTime("2011-01-10T00:00:00.000Z"), new DateTime("2011-01-17T00:00:00.000Z") ), - Granularity.WEEK.getIterable(new Interval(baseTime.getMillis(), baseTime.plus(Weeks.THREE).getMillis())) + Granularities.WEEK.getIterable(new Interval(baseTime.getMillis(), baseTime.plus(Weeks.THREE).getMillis())) ); } @@ -213,7 +214,7 @@ public class QueryGranularityTest new DateTime("2011-01-10T00:00:00.000Z"), new DateTime("2011-01-17T00:00:00.000Z") ), - Granularity.WEEK.getIterable(new Interval(baseTime.getMillis(), baseTime.plus(Weeks.THREE).getMillis())) + Granularities.WEEK.getIterable(new Interval(baseTime.getMillis(), baseTime.plus(Weeks.THREE).getMillis())) ); } @@ -228,7 +229,7 @@ public class QueryGranularityTest new DateTime("2011-02-01T00:00:00.000Z"), new DateTime("2011-03-01T00:00:00.000Z") ), - Granularity.MONTH.getIterable(new Interval(baseTime.getMillis(), baseTime.plus(Months.THREE).getMillis())) + Granularities.MONTH.getIterable(new Interval(baseTime.getMillis(), baseTime.plus(Months.THREE).getMillis())) ); } @@ -244,7 +245,7 @@ public class QueryGranularityTest new DateTime("2011-03-01T00:00:00.000Z"), new DateTime("2011-04-01T00:00:00.000Z") ), - Granularity.MONTH.getIterable(new Interval(baseTime.getMillis(), baseTime.plus(Months.THREE).getMillis())) + Granularities.MONTH.getIterable(new Interval(baseTime.getMillis(), baseTime.plus(Months.THREE).getMillis())) ); } @@ -259,7 +260,7 @@ public class QueryGranularityTest new DateTime("2011-04-01T00:00:00.000Z"), new DateTime("2011-07-01T00:00:00.000Z") ), - Granularity.QUARTER.getIterable(new Interval(baseTime.getMillis(), baseTime.plus(Months.NINE).getMillis())) + Granularities.QUARTER.getIterable(new Interval(baseTime.getMillis(), baseTime.plus(Months.NINE).getMillis())) ); } @@ -275,7 +276,7 @@ public class QueryGranularityTest new DateTime("2011-07-01T00:00:00.000Z"), new DateTime("2011-10-01T00:00:00.000Z") ), - Granularity.QUARTER.getIterable(new Interval(baseTime.getMillis(), baseTime.plus(Months.NINE).getMillis())) + Granularities.QUARTER.getIterable(new Interval(baseTime.getMillis(), baseTime.plus(Months.NINE).getMillis())) ); } @@ -290,7 +291,7 @@ public class QueryGranularityTest new DateTime("2012-01-01T00:00:00.000Z"), new DateTime("2013-01-01T00:00:00.000Z") ), - Granularity.YEAR.getIterable(new Interval(baseTime.getMillis(), baseTime.plus(Years.THREE).getMillis())) + Granularities.YEAR.getIterable(new Interval(baseTime.getMillis(), baseTime.plus(Years.THREE).getMillis())) ); } @@ -306,7 +307,7 @@ public class QueryGranularityTest new DateTime("2013-01-01T00:00:00.000Z"), new DateTime("2014-01-01T00:00:00.000Z") ), - Granularity.YEAR.getIterable(new Interval(baseTime.getMillis(), baseTime.plus(Years.THREE).getMillis())) + Granularities.YEAR.getIterable(new Interval(baseTime.getMillis(), baseTime.plus(Years.THREE).getMillis())) ); } @@ -606,7 +607,7 @@ public class QueryGranularityTest assertSameInterval( Lists.newArrayList(baseTime), - Granularity.ALL.getIterable(new Interval(baseTime.getMillis(), baseTime.plus(Days.days(3)).getMillis())) + Granularities.ALL.getIterable(new Interval(baseTime.getMillis(), 
baseTime.plus(Days.days(3)).getMillis())) ); } @@ -617,7 +618,7 @@ public class QueryGranularityTest assertSameInterval( Lists.newArrayList(baseTime), - Granularity.ALL.getIterable(new Interval(baseTime.getMillis(), baseTime.plus(Days.days(3)).getMillis())) + Granularities.ALL.getIterable(new Interval(baseTime.getMillis(), baseTime.plus(Days.days(3)).getMillis())) ); } @@ -684,17 +685,17 @@ public class QueryGranularityTest ObjectMapper mapper = new DefaultObjectMapper(); Assert.assertEquals( - Granularity.ALL, + Granularities.ALL, mapper.readValue( - mapper.writeValueAsString(Granularity.ALL), + mapper.writeValueAsString(Granularities.ALL), Granularity.class ) ); Assert.assertEquals( - Granularity.NONE, + Granularities.NONE, mapper.readValue( - mapper.writeValueAsString(Granularity.NONE), + mapper.writeValueAsString(Granularities.NONE), Granularity.class ) ); @@ -705,20 +706,20 @@ public class QueryGranularityTest { ObjectMapper mapper = new DefaultObjectMapper(); - Assert.assertEquals(Granularity.ALL, mapper.readValue("\"all\"", Granularity.class)); - Assert.assertEquals(Granularity.ALL, mapper.readValue("\"ALL\"", Granularity.class)); - Assert.assertEquals(Granularity.NONE, mapper.readValue("\"none\"", Granularity.class)); - Assert.assertEquals(Granularity.NONE, mapper.readValue("\"NONE\"", Granularity.class)); + Assert.assertEquals(Granularities.ALL, mapper.readValue("\"all\"", Granularity.class)); + Assert.assertEquals(Granularities.ALL, mapper.readValue("\"ALL\"", Granularity.class)); + Assert.assertEquals(Granularities.NONE, mapper.readValue("\"none\"", Granularity.class)); + Assert.assertEquals(Granularities.NONE, mapper.readValue("\"NONE\"", Granularity.class)); - Assert.assertEquals(Granularity.DAY, mapper.readValue("\"day\"", Granularity.class)); - Assert.assertEquals(Granularity.HOUR, mapper.readValue("\"hour\"", Granularity.class)); - Assert.assertEquals(Granularity.MINUTE, mapper.readValue("\"minute\"", Granularity.class)); - Assert.assertEquals(Granularity.FIFTEEN_MINUTE, mapper.readValue("\"fifteen_minute\"", Granularity.class)); + Assert.assertEquals(Granularities.DAY, mapper.readValue("\"day\"", Granularity.class)); + Assert.assertEquals(Granularities.HOUR, mapper.readValue("\"hour\"", Granularity.class)); + Assert.assertEquals(Granularities.MINUTE, mapper.readValue("\"minute\"", Granularity.class)); + Assert.assertEquals(Granularities.FIFTEEN_MINUTE, mapper.readValue("\"fifteen_minute\"", Granularity.class)); - Assert.assertEquals(Granularity.WEEK, mapper.readValue("\"week\"", Granularity.class)); - Assert.assertEquals(Granularity.QUARTER, mapper.readValue("\"quarter\"", Granularity.class)); - Assert.assertEquals(Granularity.MONTH, mapper.readValue("\"month\"", Granularity.class)); - Assert.assertEquals(Granularity.YEAR, mapper.readValue("\"year\"", Granularity.class)); + Assert.assertEquals(Granularities.WEEK, mapper.readValue("\"week\"", Granularity.class)); + Assert.assertEquals(Granularities.QUARTER, mapper.readValue("\"quarter\"", Granularity.class)); + Assert.assertEquals(Granularities.MONTH, mapper.readValue("\"month\"", Granularity.class)); + Assert.assertEquals(Granularities.YEAR, mapper.readValue("\"year\"", Granularity.class)); } @Test @@ -726,24 +727,24 @@ public class QueryGranularityTest { Assert.assertNull(Granularity.mergeGranularities(null)); Assert.assertNull(Granularity.mergeGranularities(ImmutableList.of())); - Assert.assertNull(Granularity.mergeGranularities(Lists.newArrayList(null, Granularity.DAY))); - 
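
// The serde block above in miniature: standard granularities round-trip
// through Jackson by name, and name lookup is case-insensitive ("all" and
// "ALL" both resolve). The same round-trip pattern, for DAY:
ObjectMapper mapper = new DefaultObjectMapper();
String json = mapper.writeValueAsString(Granularities.DAY);
Assert.assertEquals(Granularities.DAY, mapper.readValue(json, Granularity.class));
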
Assert.assertNull(Granularity.mergeGranularities(Lists.newArrayList(Granularity.DAY, null))); + Assert.assertNull(Granularity.mergeGranularities(Lists.newArrayList(null, Granularities.DAY))); + Assert.assertNull(Granularity.mergeGranularities(Lists.newArrayList(Granularities.DAY, null))); Assert.assertNull( Granularity.mergeGranularities( Lists.newArrayList( - Granularity.DAY, + Granularities.DAY, null, - Granularity.DAY + Granularities.DAY ) ) ); Assert.assertNull( - Granularity.mergeGranularities(ImmutableList.of(Granularity.ALL, Granularity.DAY)) + Granularity.mergeGranularities(ImmutableList.of(Granularities.ALL, Granularities.DAY)) ); Assert.assertEquals( - Granularity.ALL, - Granularity.mergeGranularities(ImmutableList.of(Granularity.ALL, Granularity.ALL)) + Granularities.ALL, + Granularity.mergeGranularities(ImmutableList.of(Granularities.ALL, Granularities.ALL)) ); } @@ -773,14 +774,14 @@ public class QueryGranularityTest Assert.assertFalse("expectedIter not exhausted!?", expectedIter.hasNext()); } - @Test(timeout = 60_000L) + @Test(timeout = 10_000L) public void testDeadLock() throws Exception { final URL[] urls = ((URLClassLoader)Granularity.class.getClassLoader()).getURLs(); final String className = Granularity.class.getCanonicalName(); for(int i = 0; i < 1000; ++i) { final ClassLoader loader = new URLClassLoader(urls, null); - Assert.assertNotNull(Class.forName(className, true, loader)); + Assert.assertNotNull(String.valueOf(i), Class.forName(className, true, loader)); } } } diff --git a/processing/src/test/java/io/druid/query/MultiValuedDimensionTest.java b/processing/src/test/java/io/druid/query/MultiValuedDimensionTest.java index d77571460dc..d4fb1223ec8 100644 --- a/processing/src/test/java/io/druid/query/MultiValuedDimensionTest.java +++ b/processing/src/test/java/io/druid/query/MultiValuedDimensionTest.java @@ -31,7 +31,7 @@ import io.druid.data.input.impl.CSVParseSpec; import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.StringInputRowParser; import io.druid.data.input.impl.TimestampSpec; -import io.druid.java.util.common.granularity.Granularity; +import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; import io.druid.query.aggregation.AggregationTestHelper; @@ -115,7 +115,7 @@ public class MultiValuedDimensionTest { incrementalIndex = new OnheapIncrementalIndex( 0, - Granularity.NONE, + Granularities.NONE, new AggregatorFactory[]{ new CountAggregatorFactory("count") }, @@ -160,7 +160,7 @@ public class MultiValuedDimensionTest .builder() .setDataSource("xx") .setQuerySegmentSpec(new LegacySegmentSpec("1970/3000")) - .setGranularity(Granularity.ALL) + .setGranularity(Granularities.ALL) .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("tags", "tags"))) .setAggregatorSpecs( Arrays.asList( @@ -201,7 +201,7 @@ public class MultiValuedDimensionTest .builder() .setDataSource("xx") .setQuerySegmentSpec(new LegacySegmentSpec("1970/3000")) - .setGranularity(Granularity.ALL) + .setGranularity(Granularities.ALL) .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("tags", "tags"))) .setAggregatorSpecs( Arrays.asList( @@ -242,7 +242,7 @@ public class MultiValuedDimensionTest .builder() .setDataSource("xx") .setQuerySegmentSpec(new LegacySegmentSpec("1970/3000")) - .setGranularity(Granularity.ALL) + .setGranularity(Granularities.ALL) .setDimensions( Lists.newArrayList( new RegexFilteredDimensionSpec( @@ -284,7 +284,7 @@ public class 
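
// Why testDeadLock can now afford a 10s budget instead of 60s: each iteration
// loads Granularity through a fresh, parentless URLClassLoader and forces
// static initialization (Class.forName with initialize = true). With the
// enum/holder split in this patch, <clinit> no longer reaches back into a
// singleton, so 1000 isolated loads complete quickly; the String.valueOf(i)
// message added above identifies the iteration if a load ever fails. The same
// probe as a standalone sketch (java.net.URL / URLClassLoader; the app
// classloader is a URLClassLoader on the Java 8 toolchain this era of Druid
// targets):
public static void main(String[] args) throws Exception
{
  final URL[] urls = ((URLClassLoader) Granularity.class.getClassLoader()).getURLs();
  final String name = Granularity.class.getCanonicalName();
  for (int i = 0; i < 1000; ++i) {
    // parent == null: nothing is shared, so each load re-runs static init
    Class.forName(name, true, new URLClassLoader(urls, null));
  }
  System.out.println("no deadlock across 1000 isolated loads");
}
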
MultiValuedDimensionTest { TopNQuery query = new TopNQueryBuilder() .dataSource("xx") - .granularity(Granularity.ALL) + .granularity(Granularities.ALL) .dimension(new ListFilteredDimensionSpec( new DefaultDimensionSpec("tags", "tags"), ImmutableSet.of("t3"), diff --git a/processing/src/test/java/io/druid/query/QueryRunnerTestHelper.java b/processing/src/test/java/io/druid/query/QueryRunnerTestHelper.java index c0de8fd8390..46c44a29c4e 100644 --- a/processing/src/test/java/io/druid/query/QueryRunnerTestHelper.java +++ b/processing/src/test/java/io/druid/query/QueryRunnerTestHelper.java @@ -26,6 +26,7 @@ import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.util.concurrent.ListenableFuture; import io.druid.java.util.common.UOE; +import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.granularity.Granularity; import io.druid.java.util.common.guava.MergeSequence; import io.druid.java.util.common.guava.Sequence; @@ -104,9 +105,9 @@ public class QueryRunnerTestHelper public static final DateTime minTime = new DateTime("2011-01-12T00:00:00.000Z"); - public static final Granularity dayGran = Granularity.DAY; - public static final Granularity allGran = Granularity.ALL; - public static final Granularity monthGran = Granularity.MONTH; + public static final Granularity dayGran = Granularities.DAY; + public static final Granularity allGran = Granularities.ALL; + public static final Granularity monthGran = Granularities.MONTH; public static final String timeDimension = "__time"; public static final String marketDimension = "market"; public static final String qualityDimension = "quality"; diff --git a/processing/src/test/java/io/druid/query/ResultGranularTimestampComparatorTest.java b/processing/src/test/java/io/druid/query/ResultGranularTimestampComparatorTest.java index 34f95c5d60f..c85ed44f1f9 100644 --- a/processing/src/test/java/io/druid/query/ResultGranularTimestampComparatorTest.java +++ b/processing/src/test/java/io/druid/query/ResultGranularTimestampComparatorTest.java @@ -19,6 +19,7 @@ package io.druid.query; +import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.granularity.Granularity; import org.joda.time.DateTime; import org.junit.Assert; @@ -55,7 +56,7 @@ public class ResultGranularTimestampComparatorTest Result r1 = new Result(time, null); Result r2 = new Result(time.plusYears(5), null); - Assert.assertEquals(ResultGranularTimestampComparator.create(Granularity.ALL, descending).compare(r1, r2), 0); + Assert.assertEquals(ResultGranularTimestampComparator.create(Granularities.ALL, descending).compare(r1, r2), 0); } @Test @@ -66,7 +67,7 @@ public class ResultGranularTimestampComparatorTest Result greater = new Result(time.plusHours(25), null); Result less = new Result(time.minusHours(1), null); - Granularity day = Granularity.DAY; + Granularity day = Granularities.DAY; Assert.assertEquals(ResultGranularTimestampComparator.create(day, descending).compare(res, same), 0); Assert.assertEquals(ResultGranularTimestampComparator.create(day, descending).compare(res, greater), descending ? 1 : -1); Assert.assertEquals(ResultGranularTimestampComparator.create(day, descending).compare(res, less), descending ? 
-1 : 1); @@ -80,7 +81,7 @@ public class ResultGranularTimestampComparatorTest Result greater = new Result(time.plusHours(1), null); Result less = new Result(time.minusHours(1), null); - Granularity hour = Granularity.HOUR; + Granularity hour = Granularities.HOUR; Assert.assertEquals(ResultGranularTimestampComparator.create(hour, descending).compare(res, same), 0); Assert.assertEquals(ResultGranularTimestampComparator.create(hour, descending).compare(res, greater), descending ? 1 : -1); Assert.assertEquals(ResultGranularTimestampComparator.create(hour, descending).compare(res, less), descending ? -1 : 1); diff --git a/processing/src/test/java/io/druid/query/aggregation/hyperloglog/HyperUniquesAggregationTest.java b/processing/src/test/java/io/druid/query/aggregation/hyperloglog/HyperUniquesAggregationTest.java index 857add4148a..3f50290af54 100644 --- a/processing/src/test/java/io/druid/query/aggregation/hyperloglog/HyperUniquesAggregationTest.java +++ b/processing/src/test/java/io/druid/query/aggregation/hyperloglog/HyperUniquesAggregationTest.java @@ -21,8 +21,8 @@ package io.druid.query.aggregation.hyperloglog; import com.google.common.collect.Lists; import io.druid.data.input.MapBasedRow; -import io.druid.java.util.common.granularity.Granularity; import io.druid.jackson.AggregatorsModule; +import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; import io.druid.query.aggregation.AggregationTestHelper; @@ -114,7 +114,7 @@ public class HyperUniquesAggregationTest parseSpec, metricSpec, 0, - Granularity.NONE, + Granularities.NONE, 50000, query ); @@ -176,7 +176,7 @@ public class HyperUniquesAggregationTest parseSpec, metricSpec, 0, - Granularity.DAY, + Granularities.DAY, 50000, query ); diff --git a/processing/src/test/java/io/druid/query/aggregation/post/FinalizingFieldAccessPostAggregatorTest.java b/processing/src/test/java/io/druid/query/aggregation/post/FinalizingFieldAccessPostAggregatorTest.java index 72634486d63..d6309f40c54 100644 --- a/processing/src/test/java/io/druid/query/aggregation/post/FinalizingFieldAccessPostAggregatorTest.java +++ b/processing/src/test/java/io/druid/query/aggregation/post/FinalizingFieldAccessPostAggregatorTest.java @@ -25,7 +25,7 @@ import com.google.common.collect.Maps; import com.google.common.collect.Ordering; import io.druid.data.input.MapBasedRow; import io.druid.jackson.AggregatorsModule; -import io.druid.java.util.common.granularity.Granularity; +import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; import io.druid.query.aggregation.AggregationTestHelper; @@ -229,7 +229,7 @@ public class FinalizingFieldAccessPostAggregatorTest parseSpec, metricSpec, 0, - Granularity.NONE, + Granularities.NONE, 50000, query ); diff --git a/processing/src/test/java/io/druid/query/datasourcemetadata/DataSourceMetadataQueryTest.java b/processing/src/test/java/io/druid/query/datasourcemetadata/DataSourceMetadataQueryTest.java index 9669a495bdd..99b91fd8d33 100644 --- a/processing/src/test/java/io/druid/query/datasourcemetadata/DataSourceMetadataQueryTest.java +++ b/processing/src/test/java/io/druid/query/datasourcemetadata/DataSourceMetadataQueryTest.java @@ -26,8 +26,8 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.google.common.collect.MapMaker; import io.druid.data.input.MapBasedInputRow; -import 
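
// The comparator cases above reduce to: under a granular comparator, two
// results compare equal exactly when their timestamps land in the same
// bucket. A condensed HOUR example in the same style:
Result<Object> a = new Result<Object>(new DateTime("2011-01-01T10:00:00Z"), null);
Result<Object> b = new Result<Object>(new DateTime("2011-01-01T10:59:59Z"), null);
Assert.assertEquals(0, ResultGranularTimestampComparator.create(Granularities.HOUR, false).compare(a, b));
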
io.druid.java.util.common.granularity.Granularity; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Sequences; import io.druid.query.Druids; import io.druid.query.Query; @@ -112,7 +112,7 @@ public class DataSourceMetadataQueryTest public void testMaxIngestedEventTime() throws Exception { final IncrementalIndex rtIndex = new OnheapIncrementalIndex( - 0L, Granularity.NONE, new AggregatorFactory[]{new CountAggregatorFactory("count")}, 1000 + 0L, Granularities.NONE, new AggregatorFactory[]{new CountAggregatorFactory("count")}, 1000 ); ; final QueryRunner runner = QueryRunnerTestHelper.makeQueryRunner( diff --git a/processing/src/test/java/io/druid/query/extraction/TimeFormatExtractionFnTest.java b/processing/src/test/java/io/druid/query/extraction/TimeFormatExtractionFnTest.java index 3cd5367e903..37a8b2529a0 100644 --- a/processing/src/test/java/io/druid/query/extraction/TimeFormatExtractionFnTest.java +++ b/processing/src/test/java/io/druid/query/extraction/TimeFormatExtractionFnTest.java @@ -21,6 +21,7 @@ package io.druid.query.extraction; import com.fasterxml.jackson.databind.ObjectMapper; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.granularity.Granularity; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; @@ -52,7 +53,7 @@ public class TimeFormatExtractionFnTest Assert.assertEquals("Saturday", fn.apply(timestamps[4])); Assert.assertEquals("Monday", fn.apply(timestamps[5])); - testSerde(fn, "EEEE", null, null, Granularity.NONE); + testSerde(fn, "EEEE", null, null, Granularities.NONE); } @Test @@ -66,13 +67,13 @@ public class TimeFormatExtractionFnTest Assert.assertEquals("laugardagur", fn.apply(timestamps[4])); Assert.assertEquals("mánudagur", fn.apply(timestamps[5])); - testSerde(fn, "EEEE", null, "is", Granularity.NONE); + testSerde(fn, "EEEE", null, "is", Granularities.NONE); } @Test public void testGranularExtractionWithNullPattern() throws Exception { - TimeFormatExtractionFn fn = new TimeFormatExtractionFn(null, null, null, Granularity.DAY, false); + TimeFormatExtractionFn fn = new TimeFormatExtractionFn(null, null, null, Granularities.DAY, false); Assert.assertEquals("2015-01-01T00:00:00.000Z", fn.apply(timestamps[0])); Assert.assertEquals("2015-01-02T00:00:00.000Z", fn.apply(timestamps[1])); Assert.assertEquals("2015-03-03T00:00:00.000Z", fn.apply(timestamps[2])); @@ -80,7 +81,7 @@ public class TimeFormatExtractionFnTest Assert.assertEquals("2015-05-02T00:00:00.000Z", fn.apply(timestamps[4])); Assert.assertEquals("2015-12-21T00:00:00.000Z", fn.apply(timestamps[5])); - testSerde(fn, null, null, null, Granularity.DAY); + testSerde(fn, null, null, null, Granularities.DAY); } @Test @@ -100,7 +101,7 @@ public class TimeFormatExtractionFnTest Assert.assertEquals("In Berlin ist es schon Sonntag", fn.apply(timestamps[4])); Assert.assertEquals("In Berlin ist es schon Dienstag", fn.apply(timestamps[5])); - testSerde(fn, "'In Berlin ist es schon 'EEEE", DateTimeZone.forID("Europe/Berlin"), "de", Granularity.NONE); + testSerde(fn, "'In Berlin ist es schon 'EEEE", DateTimeZone.forID("Europe/Berlin"), "de", Granularities.NONE); } public void testSerde( diff --git a/processing/src/test/java/io/druid/query/groupby/GroupByQueryConfigTest.java b/processing/src/test/java/io/druid/query/groupby/GroupByQueryConfigTest.java index 6c1e19da020..8a7e695ca18 100644 --- 
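
// The null-pattern case above in practice: with no format string, the
// extraction fn floors each timestamp to its granularity bucket and emits
// ISO-8601. Constructor arguments are (format, timeZone, locale, granularity,
// asMillis), matching the test calls:
TimeFormatExtractionFn fn = new TimeFormatExtractionFn(null, null, null, Granularities.DAY, false);
String out = fn.apply(new DateTime("2015-01-01T18:30:00Z").getMillis());  // "2015-01-01T00:00:00.000Z"
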
a/processing/src/test/java/io/druid/query/groupby/GroupByQueryConfigTest.java +++ b/processing/src/test/java/io/druid/query/groupby/GroupByQueryConfigTest.java @@ -21,8 +21,8 @@ package io.druid.query.groupby; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableMap; -import io.druid.java.util.common.granularity.Granularity; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.granularity.Granularities; import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; @@ -66,7 +66,7 @@ public class GroupByQueryConfigTest GroupByQuery.builder() .setDataSource("test") .setInterval(new Interval("2000/P1D")) - .setGranularity(Granularity.ALL) + .setGranularity(Granularities.ALL) .build() ); @@ -88,7 +88,7 @@ public class GroupByQueryConfigTest GroupByQuery.builder() .setDataSource("test") .setInterval(new Interval("2000/P1D")) - .setGranularity(Granularity.ALL) + .setGranularity(Granularities.ALL) .setContext( ImmutableMap.of( "groupByStrategy", "v1", diff --git a/processing/src/test/java/io/druid/query/groupby/GroupByQueryMergeBufferTest.java b/processing/src/test/java/io/druid/query/groupby/GroupByQueryMergeBufferTest.java index 8b7ff4e08d8..460c9d8c851 100644 --- a/processing/src/test/java/io/druid/query/groupby/GroupByQueryMergeBufferTest.java +++ b/processing/src/test/java/io/druid/query/groupby/GroupByQueryMergeBufferTest.java @@ -30,7 +30,7 @@ import io.druid.collections.BlockingPool; import io.druid.collections.ReferenceCountingResourceHolder; import io.druid.collections.StupidPool; import io.druid.data.input.Row; -import io.druid.java.util.common.granularity.Granularity; +import io.druid.java.util.common.granularity.Granularities; import io.druid.query.DruidProcessingConfig; import io.druid.query.QueryContextKeys; import io.druid.query.QueryDataSource; @@ -227,7 +227,7 @@ public class GroupByQueryMergeBufferTest final GroupByQuery query = GroupByQuery .builder() .setDataSource(QueryRunnerTestHelper.dataSource) - .setGranularity(Granularity.ALL) + .setGranularity(Granularities.ALL) .setInterval(QueryRunnerTestHelper.firstToThird) .setAggregatorSpecs(Lists.newArrayList(new LongSumAggregatorFactory("rows", "rows"))) .setContext(ImmutableMap.of(QueryContextKeys.TIMEOUT, Integers.valueOf(500))) @@ -249,13 +249,13 @@ public class GroupByQueryMergeBufferTest GroupByQuery.builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setInterval(QueryRunnerTestHelper.firstToThird) - .setGranularity(Granularity.ALL) + .setGranularity(Granularities.ALL) .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) .setAggregatorSpecs(Lists.newArrayList(QueryRunnerTestHelper.rowsCount)) .build() ) ) - .setGranularity(Granularity.ALL) + .setGranularity(Granularities.ALL) .setInterval(QueryRunnerTestHelper.firstToThird) .setAggregatorSpecs(Lists.newArrayList(new LongSumAggregatorFactory("rows", "rows"))) .setContext(ImmutableMap.of(QueryContextKeys.TIMEOUT, Integers.valueOf(500))) @@ -279,7 +279,7 @@ public class GroupByQueryMergeBufferTest GroupByQuery.builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setInterval(QueryRunnerTestHelper.firstToThird) - .setGranularity(Granularity.ALL) + .setGranularity(Granularities.ALL) .setDimensions(Lists.newArrayList( new DefaultDimensionSpec("quality", "alias"), new DefaultDimensionSpec("market", null) @@ -288,13 +288,13 @@ public class GroupByQueryMergeBufferTest .build() ) .setInterval(QueryRunnerTestHelper.firstToThird) - 
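
// The fixture repeated through these groupBy tests: the smallest well-formed
// GroupByQuery, with the granularity constant as the only notable knob:
GroupByQuery query = GroupByQuery.builder()
    .setDataSource("test")
    .setInterval(new Interval("2000/P1D"))
    .setGranularity(Granularities.ALL)
    .build();
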
.setGranularity(Granularity.ALL) + .setGranularity(Granularities.ALL) .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) .setAggregatorSpecs(Lists.newArrayList(QueryRunnerTestHelper.rowsCount)) .build() ) ) - .setGranularity(Granularity.ALL) + .setGranularity(Granularities.ALL) .setInterval(QueryRunnerTestHelper.firstToThird) .setAggregatorSpecs(Lists.newArrayList(new LongSumAggregatorFactory("rows", "rows"))) .setContext(ImmutableMap.of(QueryContextKeys.TIMEOUT, Integers.valueOf(500))) @@ -321,7 +321,7 @@ public class GroupByQueryMergeBufferTest GroupByQuery.builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setInterval(QueryRunnerTestHelper.firstToThird) - .setGranularity(Granularity.ALL) + .setGranularity(Granularities.ALL) .setDimensions(Lists.newArrayList( new DefaultDimensionSpec("quality", "alias"), new DefaultDimensionSpec("market", null), @@ -331,7 +331,7 @@ public class GroupByQueryMergeBufferTest .build() ) .setInterval(QueryRunnerTestHelper.firstToThird) - .setGranularity(Granularity.ALL) + .setGranularity(Granularities.ALL) .setDimensions(Lists.newArrayList( new DefaultDimensionSpec("quality", "alias"), new DefaultDimensionSpec("market", null) @@ -340,13 +340,13 @@ public class GroupByQueryMergeBufferTest .build() ) .setInterval(QueryRunnerTestHelper.firstToThird) - .setGranularity(Granularity.ALL) + .setGranularity(Granularities.ALL) .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) .setAggregatorSpecs(Lists.newArrayList(QueryRunnerTestHelper.rowsCount)) .build() ) ) - .setGranularity(Granularity.ALL) + .setGranularity(Granularities.ALL) .setInterval(QueryRunnerTestHelper.firstToThird) .setAggregatorSpecs(Lists.newArrayList(new LongSumAggregatorFactory("rows", "rows"))) .setContext(ImmutableMap.of(QueryContextKeys.TIMEOUT, Integers.valueOf(500))) diff --git a/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerFactoryTest.java b/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerFactoryTest.java index f4f35abe586..dc7648154da 100644 --- a/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerFactoryTest.java +++ b/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerFactoryTest.java @@ -28,7 +28,7 @@ import io.druid.data.input.impl.CSVParseSpec; import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.StringInputRowParser; import io.druid.data.input.impl.TimestampSpec; -import io.druid.java.util.common.granularity.Granularity; +import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.MergeSequence; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; @@ -68,7 +68,7 @@ public class GroupByQueryRunnerFactoryTest .builder() .setDataSource("xx") .setQuerySegmentSpec(new LegacySegmentSpec("1970/3000")) - .setGranularity(Granularity.ALL) + .setGranularity(Granularities.ALL) .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("tags", "tags"))) .setAggregatorSpecs( Arrays.asList( @@ -129,7 +129,7 @@ public class GroupByQueryRunnerFactoryTest { IncrementalIndex incrementalIndex = new OnheapIncrementalIndex( 0, - Granularity.NONE, + Granularities.NONE, new AggregatorFactory[]{ new CountAggregatorFactory("count") }, diff --git a/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerFailureTest.java b/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerFailureTest.java index 205d91b0774..a2f6f6b8d77 100644 --- 
a/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerFailureTest.java +++ b/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerFailureTest.java @@ -30,7 +30,7 @@ import io.druid.collections.BlockingPool; import io.druid.collections.ReferenceCountingResourceHolder; import io.druid.collections.StupidPool; import io.druid.data.input.Row; -import io.druid.java.util.common.granularity.Granularity; +import io.druid.java.util.common.granularity.Granularities; import io.druid.query.DruidProcessingConfig; import io.druid.query.InsufficientResourcesException; import io.druid.query.QueryContextKeys; @@ -192,13 +192,13 @@ public class GroupByQueryRunnerFailureTest GroupByQuery.builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setInterval(QueryRunnerTestHelper.firstToThird) - .setGranularity(Granularity.ALL) + .setGranularity(Granularities.ALL) .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) .setAggregatorSpecs(Lists.newArrayList(QueryRunnerTestHelper.rowsCount)) .build() ) ) - .setGranularity(Granularity.ALL) + .setGranularity(Granularities.ALL) .setInterval(QueryRunnerTestHelper.firstToThird) .setAggregatorSpecs(Lists.newArrayList(new LongSumAggregatorFactory("rows", "rows"))) .setContext(ImmutableMap.of(QueryContextKeys.TIMEOUT, Integers.valueOf(500))) @@ -221,7 +221,7 @@ public class GroupByQueryRunnerFailureTest GroupByQuery.builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setInterval(QueryRunnerTestHelper.firstToThird) - .setGranularity(Granularity.ALL) + .setGranularity(Granularities.ALL) .setDimensions(Lists.newArrayList( new DefaultDimensionSpec("quality", "alias"), new DefaultDimensionSpec("market", null) @@ -230,13 +230,13 @@ public class GroupByQueryRunnerFailureTest .build() ) .setInterval(QueryRunnerTestHelper.firstToThird) - .setGranularity(Granularity.ALL) + .setGranularity(Granularities.ALL) .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) .setAggregatorSpecs(Lists.newArrayList(QueryRunnerTestHelper.rowsCount)) .build() ) ) - .setGranularity(Granularity.ALL) + .setGranularity(Granularities.ALL) .setInterval(QueryRunnerTestHelper.firstToThird) .setAggregatorSpecs(Lists.newArrayList(new LongSumAggregatorFactory("rows", "rows"))) .setContext(ImmutableMap.of(QueryContextKeys.TIMEOUT, Integers.valueOf(500))) @@ -256,13 +256,13 @@ public class GroupByQueryRunnerFailureTest GroupByQuery.builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setInterval(QueryRunnerTestHelper.firstToThird) - .setGranularity(Granularity.ALL) + .setGranularity(Granularities.ALL) .setDimensions(Lists.newArrayList(new DefaultDimensionSpec("quality", "alias"))) .setAggregatorSpecs(Lists.newArrayList(QueryRunnerTestHelper.rowsCount)) .build() ) ) - .setGranularity(Granularity.ALL) + .setGranularity(Granularities.ALL) .setInterval(QueryRunnerTestHelper.firstToThird) .setAggregatorSpecs(Lists.newArrayList(new LongSumAggregatorFactory("rows", "rows"))) .setContext(ImmutableMap.of(QueryContextKeys.TIMEOUT, Integers.valueOf(500))) diff --git a/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTest.java b/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTest.java index dfcf04063b3..f9ce8b563ed 100644 --- a/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTest.java @@ -35,10 +35,10 @@ import com.google.common.util.concurrent.MoreExecutors; import 
io.druid.collections.BlockingPool; import io.druid.collections.StupidPool; import io.druid.data.input.Row; -import io.druid.java.util.common.granularity.Granularity; import io.druid.jackson.DefaultObjectMapper; import io.druid.java.util.common.IAE; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.granularity.PeriodGranularity; import io.druid.java.util.common.guava.MergeSequence; import io.druid.java.util.common.guava.Sequence; @@ -2150,7 +2150,7 @@ public class GroupByQueryRunnerTest .setGranularity(new PeriodGranularity(new Period("P1M"), null, null)); final GroupByQuery fullQuery = builder.build(); - final GroupByQuery allGranQuery = builder.copy().setGranularity(Granularity.ALL).build(); + final GroupByQuery allGranQuery = builder.copy().setGranularity(Granularities.ALL).build(); QueryRunner mergedRunner = factory.getToolchest().mergeResults( new QueryRunner() @@ -2268,7 +2268,7 @@ public class GroupByQueryRunnerTest new LongSumAggregatorFactory("idx", "index") ) ) - .setGranularity(Granularity.DAY) + .setGranularity(Granularities.DAY) .setLimit(limit) .addOrderByColumn("idx", OrderByColumnSpec.Direction.DESCENDING); @@ -2318,7 +2318,7 @@ public class GroupByQueryRunnerTest new LongSumAggregatorFactory("idx", "expr") ) ) - .setGranularity(Granularity.DAY) + .setGranularity(Granularities.DAY) .setLimit(limit) .addOrderByColumn("idx", OrderByColumnSpec.Direction.DESCENDING); @@ -3279,7 +3279,7 @@ public class GroupByQueryRunnerTest new DoubleSumAggregatorFactory("index", "index") ) ) - .setGranularity(Granularity.ALL) + .setGranularity(Granularities.ALL) .setHavingSpec(new GreaterThanHavingSpec("index", 310L)) .setLimitSpec( new DefaultLimitSpec( diff --git a/processing/src/test/java/io/druid/query/groupby/GroupByTimeseriesQueryRunnerTest.java b/processing/src/test/java/io/druid/query/groupby/GroupByTimeseriesQueryRunnerTest.java index 7f56ad99028..fa7a583cdde 100644 --- a/processing/src/test/java/io/druid/query/groupby/GroupByTimeseriesQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/groupby/GroupByTimeseriesQueryRunnerTest.java @@ -25,7 +25,7 @@ import com.google.common.collect.Lists; import com.google.common.util.concurrent.MoreExecutors; import io.druid.data.input.MapBasedRow; import io.druid.data.input.Row; -import io.druid.java.util.common.granularity.Granularity; +import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; import io.druid.query.Druids; @@ -140,7 +140,7 @@ public class GroupByTimeseriesQueryRunnerTest extends TimeseriesQueryRunnerTest { TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(Granularity.ALL) + .granularity(Granularities.ALL) .intervals(QueryRunnerTestHelper.fullOnInterval) .aggregators( Arrays.asList( diff --git a/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryTest.java b/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryTest.java index 8902c8eed7c..fbb8e25cf57 100644 --- a/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryTest.java +++ b/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryTest.java @@ -28,8 +28,8 @@ import com.google.common.collect.Maps; import com.google.common.util.concurrent.MoreExecutors; import io.druid.common.utils.JodaUtils; import io.druid.data.input.impl.TimestampSpec; -import 
io.druid.java.util.common.granularity.Granularity; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Sequences; import io.druid.query.BySegmentResultValue; import io.druid.query.BySegmentResultValueClass; @@ -785,7 +785,7 @@ public class SegmentMetadataQueryTest expectedSegmentAnalysis1.getNumRows() + expectedSegmentAnalysis2.getNumRows(), null, null, - Granularity.NONE, + Granularities.NONE, null ); diff --git a/processing/src/test/java/io/druid/query/search/SearchBinaryFnTest.java b/processing/src/test/java/io/druid/query/search/SearchBinaryFnTest.java index 6077f0df418..b6edd063092 100644 --- a/processing/src/test/java/io/druid/query/search/SearchBinaryFnTest.java +++ b/processing/src/test/java/io/druid/query/search/SearchBinaryFnTest.java @@ -20,7 +20,7 @@ package io.druid.query.search; import com.google.common.collect.ImmutableList; -import io.druid.java.util.common.granularity.Granularity; +import io.druid.java.util.common.granularity.Granularities; import io.druid.query.Result; import io.druid.query.ordering.StringComparators; import io.druid.query.search.search.SearchHit; @@ -97,7 +97,7 @@ public class SearchBinaryFnTest ) ); - Result actual = new SearchBinaryFn(new SearchSortSpec(StringComparators.LEXICOGRAPHIC), Granularity.ALL, Integer.MAX_VALUE).apply(r1, r2); + Result actual = new SearchBinaryFn(new SearchSortSpec(StringComparators.LEXICOGRAPHIC), Granularities.ALL, Integer.MAX_VALUE).apply(r1, r2); Assert.assertEquals(expected.getTimestamp(), actual.getTimestamp()); assertSearchMergeResult(expected.getValue(), actual.getValue()); } @@ -130,7 +130,7 @@ public class SearchBinaryFnTest ); Result expected = new Result( - Granularity.DAY.bucketStart(currTime), + Granularities.DAY.bucketStart(currTime), new SearchResultValue( ImmutableList.of( new SearchHit( @@ -145,7 +145,7 @@ public class SearchBinaryFnTest ) ); - Result actual = new SearchBinaryFn(new SearchSortSpec(StringComparators.LEXICOGRAPHIC), Granularity.DAY, Integer.MAX_VALUE).apply(r1, r2); + Result actual = new SearchBinaryFn(new SearchSortSpec(StringComparators.LEXICOGRAPHIC), Granularities.DAY, Integer.MAX_VALUE).apply(r1, r2); Assert.assertEquals(expected.getTimestamp(), actual.getTimestamp()); assertSearchMergeResult(expected.getValue(), actual.getValue()); } @@ -169,7 +169,7 @@ public class SearchBinaryFnTest Result expected = r1; - Result actual = new SearchBinaryFn(new SearchSortSpec(StringComparators.LEXICOGRAPHIC), Granularity.ALL, Integer.MAX_VALUE).apply(r1, r2); + Result actual = new SearchBinaryFn(new SearchSortSpec(StringComparators.LEXICOGRAPHIC), Granularities.ALL, Integer.MAX_VALUE).apply(r1, r2); Assert.assertEquals(expected.getTimestamp(), actual.getTimestamp()); assertSearchMergeResult(expected.getValue(), actual.getValue()); } @@ -217,7 +217,7 @@ public class SearchBinaryFnTest ) ); - Result actual = new SearchBinaryFn(new SearchSortSpec(StringComparators.LEXICOGRAPHIC), Granularity.ALL, Integer.MAX_VALUE).apply(r1, r2); + Result actual = new SearchBinaryFn(new SearchSortSpec(StringComparators.LEXICOGRAPHIC), Granularities.ALL, Integer.MAX_VALUE).apply(r1, r2); Assert.assertEquals(expected.getTimestamp(), actual.getTimestamp()); assertSearchMergeResult(expected.getValue(), actual.getValue()); } @@ -243,7 +243,7 @@ public class SearchBinaryFnTest new SearchResultValue(toHits(c, "blah:short", "blah:thisislong")) ); - Result actual = new SearchBinaryFn(searchSortSpec, Granularity.ALL, 
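
// bucketStart(...) is the flooring primitive behind the DAY merge case above:
// the merged search result is stamped with the day floor of the current time.
DateTime now = new DateTime("2011-01-12T13:47:00.000Z");
DateTime dayFloor = Granularities.DAY.bucketStart(now);  // 2011-01-12T00:00:00.000Z
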
Integer.MAX_VALUE).apply(r1, r2); + Result actual = new SearchBinaryFn(searchSortSpec, Granularities.ALL, Integer.MAX_VALUE).apply(r1, r2); Assert.assertEquals(expected.getTimestamp(), actual.getTimestamp()); assertSearchMergeResult(expected.getValue(), actual.getValue()); } @@ -269,7 +269,7 @@ public class SearchBinaryFnTest new SearchResultValue(toHits(c, "blah:short", "blah:thisislong", "blah2:thisislong")) ); - Result actual = new SearchBinaryFn(searchSortSpec, Granularity.ALL, Integer.MAX_VALUE).apply(r1, r2); + Result actual = new SearchBinaryFn(searchSortSpec, Granularities.ALL, Integer.MAX_VALUE).apply(r1, r2); Assert.assertEquals(expected.getTimestamp(), actual.getTimestamp()); assertSearchMergeResult(expected.getValue(), actual.getValue()); } @@ -296,7 +296,7 @@ public class SearchBinaryFnTest ); Result actual = new SearchBinaryFn( - searchSortSpec, Granularity.ALL, Integer.MAX_VALUE).apply(r1, r2); + searchSortSpec, Granularities.ALL, Integer.MAX_VALUE).apply(r1, r2); Assert.assertEquals(expected.getTimestamp(), actual.getTimestamp()); assertSearchMergeResult(expected.getValue(), actual.getValue()); } @@ -331,7 +331,7 @@ public class SearchBinaryFnTest Result expected = r1; - Result actual = new SearchBinaryFn(new SearchSortSpec(StringComparators.LEXICOGRAPHIC), Granularity.ALL, Integer.MAX_VALUE).apply(r1, r2); + Result actual = new SearchBinaryFn(new SearchSortSpec(StringComparators.LEXICOGRAPHIC), Granularities.ALL, Integer.MAX_VALUE).apply(r1, r2); Assert.assertEquals(expected.getTimestamp(), actual.getTimestamp()); assertSearchMergeResult(expected.getValue(), actual.getValue()); } @@ -362,7 +362,7 @@ public class SearchBinaryFnTest ) ); Result expected = r1; - Result actual = new SearchBinaryFn(new SearchSortSpec(StringComparators.LEXICOGRAPHIC), Granularity.ALL, 1).apply(r1, r2); + Result actual = new SearchBinaryFn(new SearchSortSpec(StringComparators.LEXICOGRAPHIC), Granularities.ALL, 1).apply(r1, r2); Assert.assertEquals(expected.getTimestamp(), actual.getTimestamp()); assertSearchMergeResult(expected.getValue(), actual.getValue()); } @@ -396,7 +396,7 @@ public class SearchBinaryFnTest Result expected = r1; - Result actual = new SearchBinaryFn(new SearchSortSpec(StringComparators.LEXICOGRAPHIC), Granularity.ALL, Integer.MAX_VALUE).apply(r1, r2); + Result actual = new SearchBinaryFn(new SearchSortSpec(StringComparators.LEXICOGRAPHIC), Granularities.ALL, Integer.MAX_VALUE).apply(r1, r2); Assert.assertEquals(expected.getTimestamp(), actual.getTimestamp()); assertSearchMergeResult(expected.getValue(), actual.getValue()); } diff --git a/processing/src/test/java/io/druid/query/search/SearchQueryQueryToolChestTest.java b/processing/src/test/java/io/druid/query/search/SearchQueryQueryToolChestTest.java index 6601b60dfd8..4db4c1a54dd 100644 --- a/processing/src/test/java/io/druid/query/search/SearchQueryQueryToolChestTest.java +++ b/processing/src/test/java/io/druid/query/search/SearchQueryQueryToolChestTest.java @@ -21,8 +21,8 @@ package io.druid.query.search; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; -import io.druid.java.util.common.granularity.Granularity; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.granularity.Granularities; import io.druid.query.CacheStrategy; import io.druid.query.Druids; import io.druid.query.Result; @@ -47,7 +47,7 @@ public class SearchQueryQueryToolChestTest new SearchQuery( new TableDataSource("dummy"), null, - Granularity.ALL, + Granularities.ALL, 1, new 
MultipleIntervalSegmentSpec( ImmutableList.of( diff --git a/processing/src/test/java/io/druid/query/search/SearchQueryRunnerTest.java b/processing/src/test/java/io/druid/query/search/SearchQueryRunnerTest.java index 3af12358b35..527a3f0fa66 100644 --- a/processing/src/test/java/io/druid/query/search/SearchQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/search/SearchQueryRunnerTest.java @@ -23,7 +23,7 @@ import com.google.common.base.Suppliers; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import io.druid.data.input.MapBasedInputRow; -import io.druid.java.util.common.granularity.Granularity; +import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; import io.druid.java.util.common.logger.Logger; @@ -746,7 +746,7 @@ public class SearchQueryRunnerTest { IncrementalIndex index = new OnheapIncrementalIndex( new IncrementalIndexSchema.Builder() - .withQueryGranularity(Granularity.NONE) + .withQueryGranularity(Granularities.NONE) .withMinTimestamp(new DateTime("2011-01-12T00:00:00.000Z").getMillis()).build(), true, 10 diff --git a/processing/src/test/java/io/druid/query/select/MultiSegmentSelectQueryTest.java b/processing/src/test/java/io/druid/query/select/MultiSegmentSelectQueryTest.java index c6d41c0efa2..d22952d725a 100644 --- a/processing/src/test/java/io/druid/query/select/MultiSegmentSelectQueryTest.java +++ b/processing/src/test/java/io/druid/query/select/MultiSegmentSelectQueryTest.java @@ -22,8 +22,8 @@ package io.druid.query.select; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.google.common.io.CharSource; -import io.druid.java.util.common.granularity.Granularity; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Sequences; import io.druid.query.Druids; import io.druid.query.QueryRunner; @@ -177,7 +177,7 @@ public class MultiSegmentSelectQueryTest { final IncrementalIndexSchema schema = new IncrementalIndexSchema.Builder() .withMinTimestamp(new DateTime(minTimeStamp).getMillis()) - .withQueryGranularity(Granularity.HOUR) + .withQueryGranularity(Granularities.HOUR) .withMetrics(TestIndex.METRIC_AGGS) .build(); return new OnheapIncrementalIndex(schema, true, maxRowCount); diff --git a/processing/src/test/java/io/druid/query/select/SelectBinaryFnTest.java b/processing/src/test/java/io/druid/query/select/SelectBinaryFnTest.java index 1888b612e87..aa7f5eafe29 100644 --- a/processing/src/test/java/io/druid/query/select/SelectBinaryFnTest.java +++ b/processing/src/test/java/io/druid/query/select/SelectBinaryFnTest.java @@ -23,8 +23,8 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Sets; -import io.druid.java.util.common.granularity.Granularity; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.granularity.Granularities; import io.druid.query.Result; import org.joda.time.DateTime; import org.junit.Assert; @@ -47,7 +47,7 @@ public class SelectBinaryFnTest @Test public void testApply() throws Exception { - SelectBinaryFn binaryFn = new SelectBinaryFn(Granularity.ALL, new PagingSpec(null, 5), false); + SelectBinaryFn binaryFn = new SelectBinaryFn(Granularities.ALL, new PagingSpec(null, 5), false); Result res1 = new Result<>( new DateTime("2013-01-01"), @@ 
-213,7 +213,7 @@ public class SelectBinaryFnTest @Test public void testColumnMerge() throws Exception { - SelectBinaryFn binaryFn = new SelectBinaryFn(Granularity.ALL, new PagingSpec(null, 5), false); + SelectBinaryFn binaryFn = new SelectBinaryFn(Granularities.ALL, new PagingSpec(null, 5), false); Result res1 = new Result<>( new DateTime("2013-01-01"), diff --git a/processing/src/test/java/io/druid/query/spec/SpecificSegmentQueryRunnerTest.java b/processing/src/test/java/io/druid/query/spec/SpecificSegmentQueryRunnerTest.java index 83922fc12ce..04bf6d66dc8 100644 --- a/processing/src/test/java/io/druid/query/spec/SpecificSegmentQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/spec/SpecificSegmentQueryRunnerTest.java @@ -24,8 +24,8 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.util.concurrent.MoreExecutors; -import io.druid.java.util.common.granularity.Granularity; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Accumulator; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; @@ -98,7 +98,7 @@ public class SpecificSegmentQueryRunnerTest Map responseContext = Maps.newHashMap(); TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() .dataSource("foo") - .granularity(Granularity.ALL) + .granularity(Granularities.ALL) .intervals(ImmutableList.of(new Interval("2012-01-01T00:00:00Z/P1D"))) .aggregators( ImmutableList.of( @@ -175,7 +175,7 @@ public class SpecificSegmentQueryRunnerTest final Map responseContext = Maps.newHashMap(); TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() .dataSource("foo") - .granularity(Granularity.ALL) + .granularity(Granularities.ALL) .intervals(ImmutableList.of(new Interval("2012-01-01T00:00:00Z/P1D"))) .aggregators( ImmutableList.of( diff --git a/processing/src/test/java/io/druid/query/timeboundary/TimeBoundaryQueryRunnerTest.java b/processing/src/test/java/io/druid/query/timeboundary/TimeBoundaryQueryRunnerTest.java index f475c974107..c9490574926 100644 --- a/processing/src/test/java/io/druid/query/timeboundary/TimeBoundaryQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/timeboundary/TimeBoundaryQueryRunnerTest.java @@ -24,7 +24,7 @@ import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.collect.MapMaker; import com.google.common.io.CharSource; -import io.druid.java.util.common.granularity.Granularity; +import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Sequences; import io.druid.query.Druids; import io.druid.query.QueryRunner; @@ -116,7 +116,7 @@ public class TimeBoundaryQueryRunnerTest { final IncrementalIndexSchema schema = new IncrementalIndexSchema.Builder() .withMinTimestamp(new DateTime(minTimeStamp).getMillis()) - .withQueryGranularity(Granularity.HOUR) + .withQueryGranularity(Granularities.HOUR) .withMetrics(TestIndex.METRIC_AGGS) .build(); return new OnheapIncrementalIndex(schema, true, maxRowCount); diff --git a/processing/src/test/java/io/druid/query/timeseries/TimeseriesBinaryFnTest.java b/processing/src/test/java/io/druid/query/timeseries/TimeseriesBinaryFnTest.java index ebfa947f0fb..7d70579b79d 100644 --- a/processing/src/test/java/io/druid/query/timeseries/TimeseriesBinaryFnTest.java +++ 
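
// Several of these tests assemble their in-memory index identically; only the
// source of the granularity constant changed. The shared recipe (the 10_000
// row cap here is an arbitrary stand-in for each test's maxRowCount):
IncrementalIndexSchema schema = new IncrementalIndexSchema.Builder()
    .withMinTimestamp(new DateTime("2011-01-12T00:00:00.000Z").getMillis())
    .withQueryGranularity(Granularities.HOUR)
    .withMetrics(TestIndex.METRIC_AGGS)
    .build();
IncrementalIndex index = new OnheapIncrementalIndex(schema, true, 10_000);
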
b/processing/src/test/java/io/druid/query/timeseries/TimeseriesBinaryFnTest.java @@ -20,7 +20,7 @@ package io.druid.query.timeseries; import com.google.common.collect.ImmutableMap; -import io.druid.java.util.common.granularity.Granularity; +import io.druid.java.util.common.granularity.Granularities; import io.druid.query.Result; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.CountAggregatorFactory; @@ -77,7 +77,7 @@ public class TimeseriesBinaryFnTest ); Result actual = new TimeseriesBinaryFn( - Granularity.ALL, + Granularities.ALL, aggregatorFactories ).apply( result1, @@ -109,7 +109,7 @@ public class TimeseriesBinaryFnTest ); Result expected = new Result( - Granularity.DAY.bucketStart(currTime), + Granularities.DAY.bucketStart(currTime), new TimeseriesResultValue( ImmutableMap.of( "rows", 3L, @@ -119,7 +119,7 @@ public class TimeseriesBinaryFnTest ); Result actual = new TimeseriesBinaryFn( - Granularity.DAY, + Granularities.DAY, aggregatorFactories ).apply( result1, @@ -145,7 +145,7 @@ public class TimeseriesBinaryFnTest Result expected = result1; Result actual = new TimeseriesBinaryFn( - Granularity.ALL, + Granularities.ALL, aggregatorFactories ).apply( result1, @@ -187,7 +187,7 @@ public class TimeseriesBinaryFnTest ); Result actual = new TimeseriesBinaryFn( - Granularity.ALL, + Granularities.ALL, aggregatorFactories ).apply( result1, diff --git a/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryQueryToolChestTest.java b/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryQueryToolChestTest.java index 00f1384b729..5331a0b030e 100644 --- a/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryQueryToolChestTest.java +++ b/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryQueryToolChestTest.java @@ -22,8 +22,8 @@ package io.druid.query.timeseries; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; -import io.druid.java.util.common.granularity.Granularity; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.granularity.Granularities; import io.druid.query.CacheStrategy; import io.druid.query.QueryRunnerTestHelper; import io.druid.query.Result; @@ -76,7 +76,7 @@ public class TimeseriesQueryQueryToolChestTest descending, VirtualColumns.EMPTY, null, - Granularity.ALL, + Granularities.ALL, ImmutableList.of(new CountAggregatorFactory("metric1")), null, null diff --git a/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerBonusTest.java b/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerBonusTest.java index 19b053d388d..7d9fc452b7f 100644 --- a/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerBonusTest.java +++ b/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerBonusTest.java @@ -23,7 +23,7 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import io.druid.data.input.MapBasedInputRow; -import io.druid.java.util.common.granularity.Granularity; +import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Sequences; import io.druid.query.Druids; import io.druid.query.FinalizeResultsQueryRunner; @@ -70,7 +70,7 @@ public class TimeseriesQueryRunnerBonusTest public void testOneRowAtATime() throws Exception { final IncrementalIndex oneRowIndex = new OnheapIncrementalIndex( 
-        new DateTime("2012-01-01T00:00:00Z").getMillis(), Granularity.NONE, new AggregatorFactory[]{}, 1000
+        new DateTime("2012-01-01T00:00:00Z").getMillis(), Granularities.NONE, new AggregatorFactory[]{}, 1000
     );
 
     List<Result<TimeseriesResultValue>> results;
@@ -122,7 +122,7 @@ public class TimeseriesQueryRunnerBonusTest
 
     TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
                                   .dataSource("xxx")
-                                  .granularity(Granularity.ALL)
+                                  .granularity(Granularities.ALL)
                                   .intervals(ImmutableList.of(new Interval("2012-01-01T00:00:00Z/P1D")))
                                   .aggregators(
                                       ImmutableList.of(
diff --git a/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerTest.java b/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerTest.java
index 21b765084a0..c293b518190 100644
--- a/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerTest.java
+++ b/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerTest.java
@@ -23,6 +23,7 @@ import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Iterables;
 import com.google.common.collect.Lists;
+import io.druid.java.util.common.granularity.Granularities;
 import io.druid.java.util.common.granularity.Granularity;
 import io.druid.java.util.common.granularity.PeriodGranularity;
 import io.druid.java.util.common.guava.Sequences;
@@ -35,10 +36,10 @@ import io.druid.query.aggregation.CountAggregatorFactory;
 import io.druid.query.aggregation.DoubleMaxAggregatorFactory;
 import io.druid.query.aggregation.DoubleMinAggregatorFactory;
 import io.druid.query.aggregation.FilteredAggregatorFactory;
-import io.druid.query.aggregation.first.DoubleFirstAggregatorFactory;
-import io.druid.query.aggregation.last.DoubleLastAggregatorFactory;
 import io.druid.query.aggregation.LongSumAggregatorFactory;
 import io.druid.query.aggregation.PostAggregator;
+import io.druid.query.aggregation.first.DoubleFirstAggregatorFactory;
+import io.druid.query.aggregation.last.DoubleLastAggregatorFactory;
 import io.druid.query.extraction.MapLookupExtractor;
 import io.druid.query.filter.AndDimFilter;
 import io.druid.query.filter.BoundDimFilter;
@@ -154,7 +155,7 @@ public class TimeseriesQueryRunnerTest
   @Test
   public void testFullOnTimeseries()
   {
-    Granularity gran = Granularity.DAY;
+    Granularity gran = Granularities.DAY;
     TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
                                   .dataSource(QueryRunnerTestHelper.dataSource)
                                   .granularity(gran)
@@ -227,7 +228,7 @@ public class TimeseriesQueryRunnerTest
   @Test
   public void testTimeseriesNoAggregators()
   {
-    Granularity gran = Granularity.DAY;
+    Granularity gran = Granularities.DAY;
     TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
                                   .dataSource(QueryRunnerTestHelper.dataSource)
                                   .granularity(gran)
@@ -263,7 +264,7 @@ public class TimeseriesQueryRunnerTest
   {
     TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
                                   .dataSource(QueryRunnerTestHelper.dataSource)
-                                  .granularity(Granularity.ALL)
+                                  .granularity(Granularities.ALL)
                                   .intervals(QueryRunnerTestHelper.fullOnInterval)
                                   .aggregators(
                                       Arrays.asList(
@@ -626,7 +627,7 @@ public class TimeseriesQueryRunnerTest
     TimeseriesQuery query1 = Druids.newTimeseriesQueryBuilder()
                                    .dataSource(QueryRunnerTestHelper.dataSource)
                                    .filters(QueryRunnerTestHelper.marketDimension, "spot", "upfront", "total_market")
-                                   .granularity(Granularity.HOUR)
+                                   .granularity(Granularities.HOUR)
                                    .intervals(
                                        Arrays.asList(
                                            new Interval(
@@ -647,7 +648,7 @@ public class TimeseriesQueryRunnerTest
                                    .build();
 
     List<Result<TimeseriesResultValue>> lotsOfZeroes = Lists.newArrayList();
-    final Iterable<Interval> iterable = Granularity.HOUR.getIterable(new Interval(new DateTime("2011-04-14T01").getMillis(), new DateTime("2011-04-15").getMillis()));
+    final Iterable<Interval> iterable = Granularities.HOUR.getIterable(new Interval(new DateTime("2011-04-14T01").getMillis(), new DateTime("2011-04-15").getMillis()));
     for (Interval interval : iterable) {
       lotsOfZeroes.add(
          new Result<>(
diff --git a/processing/src/test/java/io/druid/query/topn/TopNBinaryFnBenchmark.java b/processing/src/test/java/io/druid/query/topn/TopNBinaryFnBenchmark.java
index 1ae678c9a7d..498aac23705 100644
--- a/processing/src/test/java/io/druid/query/topn/TopNBinaryFnBenchmark.java
+++ b/processing/src/test/java/io/druid/query/topn/TopNBinaryFnBenchmark.java
@@ -23,7 +23,7 @@ import com.google.caliper.Param;
 import com.google.caliper.Runner;
 import com.google.caliper.SimpleBenchmark;
 import com.google.common.collect.Lists;
-import io.druid.java.util.common.granularity.Granularity;
+import io.druid.java.util.common.granularity.Granularities;
 import io.druid.query.Result;
 import io.druid.query.aggregation.AggregatorFactory;
 import io.druid.query.aggregation.CountAggregatorFactory;
@@ -116,7 +116,7 @@ public class TopNBinaryFnBenchmark extends SimpleBenchmark
     );
     fn = new TopNBinaryFn(
         TopNResultMerger.identity,
-        Granularity.ALL,
+        Granularities.ALL,
         new DefaultDimensionSpec("testdim", null),
         new NumericTopNMetricSpec("index"),
         100,
diff --git a/processing/src/test/java/io/druid/query/topn/TopNBinaryFnTest.java b/processing/src/test/java/io/druid/query/topn/TopNBinaryFnTest.java
index 912a59144a2..241eba6fbb2 100644
--- a/processing/src/test/java/io/druid/query/topn/TopNBinaryFnTest.java
+++ b/processing/src/test/java/io/druid/query/topn/TopNBinaryFnTest.java
@@ -22,7 +22,7 @@ package io.druid.query.topn;
 
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Lists;
-import io.druid.java.util.common.granularity.Granularity;
+import io.druid.java.util.common.granularity.Granularities;
 import io.druid.query.Result;
 import io.druid.query.aggregation.AggregatorFactory;
 import io.druid.query.aggregation.CountAggregatorFactory;
@@ -145,7 +145,7 @@ public class TopNBinaryFnTest
 
     Result<TopNResultValue> actual = new TopNBinaryFn(
         TopNResultMerger.identity,
-        Granularity.ALL,
+        Granularities.ALL,
         new DefaultDimensionSpec("testdim", null),
         new NumericTopNMetricSpec("index"),
         2,
@@ -208,7 +208,7 @@ public class TopNBinaryFnTest
     );
 
     Result<TopNResultValue> expected = new Result<TopNResultValue>(
-        Granularity.DAY.bucketStart(currTime),
+        Granularities.DAY.bucketStart(currTime),
         new TopNResultValue(
             ImmutableList.<Map<String, Object>>of(
                 ImmutableMap.of(
@@ -227,7 +227,7 @@ public class TopNBinaryFnTest
 
     Result<TopNResultValue> actual = new TopNBinaryFn(
         TopNResultMerger.identity,
-        Granularity.DAY,
+        Granularities.DAY,
         new DefaultDimensionSpec("testdim", null),
         new NumericTopNMetricSpec("index"),
         2,
@@ -272,7 +272,7 @@ public class TopNBinaryFnTest
 
     Result<TopNResultValue> actual = new TopNBinaryFn(
         TopNResultMerger.identity,
-        Granularity.ALL,
+        Granularities.ALL,
         new DefaultDimensionSpec("testdim", null),
         new NumericTopNMetricSpec("index"),
         2,
@@ -368,7 +368,7 @@ public class TopNBinaryFnTest
 
     Result<TopNResultValue> actual = new TopNBinaryFn(
         TopNResultMerger.identity,
-        Granularity.ALL,
+        Granularities.ALL,
         new DefaultDimensionSpec("testdim", null),
         new NumericTopNMetricSpec("addrowsindexconstant"),
         3,
@@ -450,7 +450,7 @@ public class TopNBinaryFnTest
 
     Result<TopNResultValue> actual = new TopNBinaryFn(
         TopNResultMerger.identity,
-        Granularity.ALL,
+        Granularities.ALL,
         new DefaultDimensionSpec("testdim", null),
         new NumericTopNMetricSpec("index"),
         2,
@@ -508,7 +508,7 @@ public class TopNBinaryFnTest
 
     Result<TopNResultValue> actual = new TopNBinaryFn(
         TopNResultMerger.identity,
-        Granularity.ALL,
+        Granularities.ALL,
         new DefaultDimensionSpec("INVALID_DIM_NAME", null),
         new DimensionTopNMetricSpec(null, StringComparators.LEXICOGRAPHIC),
         2,
diff --git a/processing/src/test/java/io/druid/query/topn/TopNQueryQueryToolChestTest.java b/processing/src/test/java/io/druid/query/topn/TopNQueryQueryToolChestTest.java
index d62ab46a770..9e59eb11cde 100644
--- a/processing/src/test/java/io/druid/query/topn/TopNQueryQueryToolChestTest.java
+++ b/processing/src/test/java/io/druid/query/topn/TopNQueryQueryToolChestTest.java
@@ -23,8 +23,8 @@ import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Maps;
-import io.druid.java.util.common.granularity.Granularity;
 import io.druid.jackson.DefaultObjectMapper;
+import io.druid.java.util.common.granularity.Granularities;
 import io.druid.java.util.common.guava.Sequence;
 import io.druid.query.CacheStrategy;
 import io.druid.query.Query;
@@ -77,7 +77,7 @@ public class TopNQueryQueryToolChestTest
             )
         ),
         null,
-        Granularity.ALL,
+        Granularities.ALL,
         ImmutableList.of(new CountAggregatorFactory("metric1")),
         ImmutableList.of(new ConstantPostAggregator("post", 10)),
         null
@@ -129,7 +129,7 @@ public class TopNQueryQueryToolChestTest
             )
         ),
         null,
-        Granularity.ALL,
+        Granularities.ALL,
         ImmutableList.of(new CountAggregatorFactory("metric1")),
         ImmutableList.of(new ConstantPostAggregator("post", 10)),
         null
@@ -149,7 +149,7 @@ public class TopNQueryQueryToolChestTest
             )
         ),
         null,
-        Granularity.ALL,
+        Granularities.ALL,
         ImmutableList.of(new CountAggregatorFactory("metric1")),
         ImmutableList.of(
             new ArithmeticPostAggregator(
diff --git a/processing/src/test/java/io/druid/query/topn/TopNQueryRunnerTest.java b/processing/src/test/java/io/druid/query/topn/TopNQueryRunnerTest.java
index 4ca0be9c46e..32375566335 100644
--- a/processing/src/test/java/io/druid/query/topn/TopNQueryRunnerTest.java
+++ b/processing/src/test/java/io/druid/query/topn/TopNQueryRunnerTest.java
@@ -31,6 +31,7 @@ import com.google.common.collect.Sets;
 import io.druid.collections.StupidPool;
 import io.druid.java.util.common.IAE;
 import io.druid.java.util.common.ISE;
+import io.druid.java.util.common.granularity.Granularities;
 import io.druid.java.util.common.granularity.Granularity;
 import io.druid.java.util.common.guava.Sequence;
 import io.druid.java.util.common.guava.Sequences;
@@ -47,15 +48,15 @@ import io.druid.query.aggregation.DoubleMaxAggregatorFactory;
 import io.druid.query.aggregation.DoubleMinAggregatorFactory;
 import io.druid.query.aggregation.DoubleSumAggregatorFactory;
 import io.druid.query.aggregation.FilteredAggregatorFactory;
-import io.druid.query.aggregation.first.DoubleFirstAggregatorFactory;
-import io.druid.query.aggregation.first.LongFirstAggregatorFactory;
 import io.druid.query.aggregation.PostAggregator;
 import io.druid.query.aggregation.cardinality.CardinalityAggregatorFactory;
+import io.druid.query.aggregation.first.DoubleFirstAggregatorFactory;
+import io.druid.query.aggregation.first.LongFirstAggregatorFactory;
 import io.druid.query.aggregation.hyperloglog.HyperUniqueFinalizingPostAggregator;
 import io.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory;
+import io.druid.query.aggregation.last.LongLastAggregatorFactory;
 import io.druid.query.dimension.DefaultDimensionSpec;
 import
index 19037d07954..58c3f3a5314 100644
--- a/processing/src/test/java/io/druid/segment/EmptyIndexTest.java
+++ b/processing/src/test/java/io/druid/segment/EmptyIndexTest.java
@@ -22,7 +22,7 @@ package io.druid.segment;
 import com.google.common.collect.Iterables;
 import com.google.common.collect.Lists;
 import io.druid.collections.bitmap.ConciseBitmapFactory;
-import io.druid.java.util.common.granularity.Granularity;
+import io.druid.java.util.common.granularity.Granularities;
 import io.druid.query.aggregation.AggregatorFactory;
 import io.druid.segment.column.Column;
 import io.druid.segment.incremental.IncrementalIndex;
@@ -51,7 +51,7 @@ public class EmptyIndexTest
     try {
       IncrementalIndex emptyIndex = new OnheapIncrementalIndex(
           0,
-          Granularity.NONE,
+          Granularities.NONE,
          new AggregatorFactory[0],
          1000
      );
diff --git a/processing/src/test/java/io/druid/segment/IndexIOTest.java b/processing/src/test/java/io/druid/segment/IndexIOTest.java
index 606737b6e70..d397b0a2814 100644
--- a/processing/src/test/java/io/druid/segment/IndexIOTest.java
+++ b/processing/src/test/java/io/druid/segment/IndexIOTest.java
@@ -29,8 +29,8 @@ import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 import io.druid.data.input.MapBasedInputRow;
 import io.druid.data.input.impl.DimensionsSpec;
-import io.druid.java.util.common.granularity.Granularity;
 import io.druid.java.util.common.UOE;
+import io.druid.java.util.common.granularity.Granularities;
 import io.druid.query.aggregation.Aggregator;
 import io.druid.query.aggregation.AggregatorFactory;
 import io.druid.query.aggregation.CountAggregatorFactory;
@@ -265,7 +265,7 @@ public class IndexIOTest
 
     final IncrementalIndex incrementalIndex1 = new OnheapIncrementalIndex(
         new IncrementalIndexSchema.Builder().withMinTimestamp(DEFAULT_INTERVAL.getStart().getMillis())
-                                            .withQueryGranularity(Granularity.NONE)
+                                            .withQueryGranularity(Granularities.NONE)
                                             .withMetrics(
                                                 new AggregatorFactory[]{
                                                     new CountAggregatorFactory(
@@ -287,7 +287,7 @@ public class IndexIOTest
 
     final IncrementalIndex incrementalIndex2 = new OnheapIncrementalIndex(
         new IncrementalIndexSchema.Builder().withMinTimestamp(DEFAULT_INTERVAL.getStart().getMillis())
-                                            .withQueryGranularity(Granularity.NONE)
+                                            .withQueryGranularity(Granularities.NONE)
                                             .withMetrics(
                                                 new AggregatorFactory[]{
                                                     new CountAggregatorFactory(
diff --git a/processing/src/test/java/io/druid/segment/IndexMergerTest.java b/processing/src/test/java/io/druid/segment/IndexMergerTest.java
index 936b1851e52..38c5a64b7fe 100644
--- a/processing/src/test/java/io/druid/segment/IndexMergerTest.java
+++ b/processing/src/test/java/io/druid/segment/IndexMergerTest.java
@@ -27,16 +27,15 @@ import com.google.common.collect.ImmutableSet;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Sets;
 import com.google.common.primitives.Ints;
-
-import io.druid.data.input.InputRow;
 import io.druid.collections.bitmap.RoaringBitmapFactory;
+import io.druid.data.input.InputRow;
 import io.druid.data.input.MapBasedInputRow;
 import io.druid.data.input.impl.DimensionSchema;
 import io.druid.data.input.impl.DimensionSchema.MultiValueHandling;
 import io.druid.data.input.impl.DimensionsSpec;
-import io.druid.java.util.common.granularity.Granularity;
 import io.druid.java.util.common.IAE;
 import io.druid.java.util.common.ISE;
+import io.druid.java.util.common.granularity.Granularities;
 import io.druid.java.util.common.io.smoosh.SmooshedFileMapper;
 import io.druid.query.aggregation.AggregatorFactory;
 import io.druid.query.aggregation.CountAggregatorFactory;
@@ -189,7 +188,7 @@ public class IndexMergerTest
     );
 
     Assert.assertEquals(
-        Granularity.NONE,
+        Granularities.NONE,
         index.getMetadata().getQueryGranularity()
     );
   }
@@ -277,7 +276,7 @@ public class IndexMergerTest
             .setAggregators(
                 IncrementalIndexTest.getDefaultCombiningAggregatorFactories()
             )
-            .setQueryGranularity(Granularity.NONE)
+            .setQueryGranularity(Granularities.NONE)
             .setRollup(Boolean.TRUE)
             .putAll(metadataElems),
         index.getMetadata()
@@ -293,7 +292,7 @@ public class IndexMergerTest
 
     IncrementalIndex toPersist2 = new OnheapIncrementalIndex(
         0L,
-        Granularity.NONE,
+        Granularities.NONE,
         new AggregatorFactory[]{new CountAggregatorFactory("count")},
         1000
     );
@@ -379,13 +378,13 @@ public class IndexMergerTest
   {
     final IncrementalIndex toPersist1 = new OnheapIncrementalIndex(
         0L,
-        Granularity.NONE,
+        Granularities.NONE,
         new AggregatorFactory[]{},
         10
     );
     final IncrementalIndex toPersist2 = new OnheapIncrementalIndex(
         0L,
-        Granularity.NONE,
+        Granularities.NONE,
         new AggregatorFactory[]{},
         10
     );
@@ -923,7 +922,7 @@ public class IndexMergerTest
             null
         ))
         .withMinTimestamp(0L)
-        .withQueryGranularity(Granularity.NONE)
+        .withQueryGranularity(Granularities.NONE)
         .withMetrics(new AggregatorFactory[]{new CountAggregatorFactory("count")})
         .build();
 
@@ -1142,7 +1141,7 @@ public class IndexMergerTest
 
     IncrementalIndex toPersistA = new OnheapIncrementalIndex(
         0L,
-        Granularity.NONE,
+        Granularities.NONE,
         new AggregatorFactory[]{new CountAggregatorFactory("count")},
         1000
     );
@@ -1167,7 +1166,7 @@ public class IndexMergerTest
 
     IncrementalIndex toPersistB = new OnheapIncrementalIndex(
         0L,
-        Granularity.NONE,
+        Granularities.NONE,
         new AggregatorFactory[]{new CountAggregatorFactory("count")},
         1000
     );
@@ -1282,7 +1281,7 @@ public class IndexMergerTest
 
     IncrementalIndexSchema indexSchema = new IncrementalIndexSchema.Builder()
         .withMinTimestamp(0L)
-        .withQueryGranularity(Granularity.NONE)
+        .withQueryGranularity(Granularities.NONE)
         .withMetrics(new AggregatorFactory[]{new CountAggregatorFactory("count")})
         .withRollup(false)
         .build();
@@ -1417,7 +1416,7 @@ public class IndexMergerTest
 
     IncrementalIndexSchema indexSchema = new IncrementalIndexSchema.Builder()
         .withMinTimestamp(0L)
-        .withQueryGranularity(Granularity.NONE)
+        .withQueryGranularity(Granularities.NONE)
         .withMetrics(new AggregatorFactory[]{new CountAggregatorFactory("count")})
         .withRollup(false)
         .build();
@@ -1543,7 +1542,7 @@ public class IndexMergerTest
 
     IncrementalIndex toPersistBA2 = new OnheapIncrementalIndex(
         0L,
-        Granularity.NONE,
+        Granularities.NONE,
         new AggregatorFactory[]{new CountAggregatorFactory("count")},
         1000
     );
@@ -2053,7 +2052,7 @@ public class IndexMergerTest
   {
     IncrementalIndex toPersist1 = new OnheapIncrementalIndex(
         0L,
-        Granularity.NONE,
+        Granularities.NONE,
         new AggregatorFactory[]{new CountAggregatorFactory("count")},
         1000
     );
@@ -2089,7 +2088,7 @@ public class IndexMergerTest
   {
     IncrementalIndex toPersist1 = new OnheapIncrementalIndex(
         0L,
-        Granularity.NONE,
+        Granularities.NONE,
         new AggregatorFactory[]{new CountAggregatorFactory("count")},
         1000
     );
@@ -2115,7 +2114,7 @@ public class IndexMergerTest
   {
     IncrementalIndexSchema schema = new IncrementalIndexSchema.Builder()
         .withMinTimestamp(0L)
-        .withQueryGranularity(Granularity.NONE)
+        .withQueryGranularity(Granularities.NONE)
         .withDimensionsSpec(new DimensionsSpec(DimensionsSpec.getDefaultSchemas(dims), null, null))
         .withMetrics(new AggregatorFactory[]{new CountAggregatorFactory("count")})
         .withRollup(true)
diff --git a/processing/src/test/java/io/druid/segment/IndexMergerV9CompatibilityTest.java b/processing/src/test/java/io/druid/segment/IndexMergerV9CompatibilityTest.java
index 7aa86c23ab1..bffceb2c1fb 100644
--- a/processing/src/test/java/io/druid/segment/IndexMergerV9CompatibilityTest.java
+++ b/processing/src/test/java/io/druid/segment/IndexMergerV9CompatibilityTest.java
@@ -28,7 +28,7 @@ import com.google.common.io.Files;
 import io.druid.common.utils.JodaUtils;
 import io.druid.data.input.InputRow;
 import io.druid.data.input.MapBasedInputRow;
-import io.druid.java.util.common.granularity.Granularity;
+import io.druid.java.util.common.granularity.Granularities;
 import io.druid.query.aggregation.AggregatorFactory;
 import io.druid.query.aggregation.CountAggregatorFactory;
 import io.druid.segment.data.CompressedObjectStrategy;
@@ -168,7 +168,7 @@ public class IndexMergerV9CompatibilityTest
   {
     toPersist = new OnheapIncrementalIndex(
         JodaUtils.MIN_INSTANT,
-        Granularity.NONE,
+        Granularities.NONE,
         DEFAULT_AGG_FACTORIES,
         1000000
     );
diff --git a/processing/src/test/java/io/druid/segment/IndexMergerV9WithSpatialIndexTest.java b/processing/src/test/java/io/druid/segment/IndexMergerV9WithSpatialIndexTest.java
index 534bf5850ac..98d25fcfef9 100644
--- a/processing/src/test/java/io/druid/segment/IndexMergerV9WithSpatialIndexTest.java
+++ b/processing/src/test/java/io/druid/segment/IndexMergerV9WithSpatialIndexTest.java
@@ -28,7 +28,7 @@ import io.druid.collections.spatial.search.RectangularBound;
 import io.druid.data.input.MapBasedInputRow;
 import io.druid.data.input.impl.DimensionsSpec;
 import io.druid.data.input.impl.SpatialDimensionSchema;
-import io.druid.java.util.common.granularity.Granularity;
+import io.druid.java.util.common.granularity.Granularities;
 import io.druid.query.Druids;
 import io.druid.query.FinalizeResultsQueryRunner;
 import io.druid.query.QueryRunner;
@@ -104,7 +104,7 @@ public class IndexMergerV9WithSpatialIndexTest
   {
     IncrementalIndex theIndex = new OnheapIncrementalIndex(
         new IncrementalIndexSchema.Builder().withMinTimestamp(DATA_INTERVAL.getStartMillis())
-                                            .withQueryGranularity(Granularity.DAY)
+                                            .withQueryGranularity(Granularities.DAY)
                                             .withMetrics(METRIC_AGGS)
                                             .withDimensionsSpec(
                                                 new DimensionsSpec(
@@ -272,7 +272,7 @@ public class IndexMergerV9WithSpatialIndexTest
     try {
       IncrementalIndex first = new OnheapIncrementalIndex(
          new IncrementalIndexSchema.Builder().withMinTimestamp(DATA_INTERVAL.getStartMillis())
-                                             .withQueryGranularity(Granularity.DAY)
+                                             .withQueryGranularity(Granularities.DAY)
                                              .withMetrics(METRIC_AGGS)
                                              .withDimensionsSpec(
                                                  new DimensionsSpec(
@@ -296,7 +296,7 @@ public class IndexMergerV9WithSpatialIndexTest
      );
      IncrementalIndex second = new OnheapIncrementalIndex(
          new IncrementalIndexSchema.Builder().withMinTimestamp(DATA_INTERVAL.getStartMillis())
-                                             .withQueryGranularity(Granularity.DAY)
+                                             .withQueryGranularity(Granularities.DAY)
                                              .withMetrics(METRIC_AGGS)
                                              .withDimensionsSpec(
                                                  new DimensionsSpec(
@@ -320,7 +320,7 @@ public class IndexMergerV9WithSpatialIndexTest
      );
      IncrementalIndex third = new OnheapIncrementalIndex(
          new IncrementalIndexSchema.Builder().withMinTimestamp(DATA_INTERVAL.getStartMillis())
-                                             .withQueryGranularity(Granularity.DAY)
+                                             .withQueryGranularity(Granularities.DAY)
                                              .withMetrics(METRIC_AGGS)
                                              .withDimensionsSpec(
                                                  new DimensionsSpec(
@@ -525,7 +525,7 @@ public class IndexMergerV9WithSpatialIndexTest
   {
     TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
                                   .dataSource("test")
-                                  .granularity(Granularity.ALL)
+                                  .granularity(Granularities.ALL)
                                   .intervals(Arrays.asList(new Interval("2013-01-01/2013-01-07")))
                                   .filters(
                                       new SpatialDimFilter(
@@ -579,7 +579,7 @@ public class IndexMergerV9WithSpatialIndexTest
   {
     TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
                                   .dataSource("test")
-                                  .granularity(Granularity.ALL)
+                                  .granularity(Granularities.ALL)
                                   .intervals(Arrays.asList(new Interval("2013-01-01/2013-01-07")))
                                   .filters(
                                       new SpatialDimFilter(
@@ -632,7 +632,7 @@ public class IndexMergerV9WithSpatialIndexTest
   {
     TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
                                   .dataSource("test")
-                                  .granularity(Granularity.DAY)
+                                  .granularity(Granularities.DAY)
                                   .intervals(Arrays.asList(new Interval("2013-01-01/2013-01-07")))
                                   .filters(
                                       new SpatialDimFilter(
diff --git a/processing/src/test/java/io/druid/segment/MetadataTest.java b/processing/src/test/java/io/druid/segment/MetadataTest.java
index 664ced16f83..60f3d4703ae 100644
--- a/processing/src/test/java/io/druid/segment/MetadataTest.java
+++ b/processing/src/test/java/io/druid/segment/MetadataTest.java
@@ -22,8 +22,8 @@ package io.druid.segment;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.collect.ImmutableList;
 import io.druid.data.input.impl.TimestampSpec;
-import io.druid.java.util.common.granularity.Granularity;
 import io.druid.jackson.DefaultObjectMapper;
+import io.druid.java.util.common.granularity.Granularities;
 import io.druid.query.aggregation.AggregatorFactory;
 import io.druid.query.aggregation.DoubleMaxAggregatorFactory;
 import io.druid.query.aggregation.LongMaxAggregatorFactory;
@@ -51,7 +51,7 @@ public class MetadataTest
         new LongSumAggregatorFactory("out", "in")
     };
     metadata.setAggregators(aggregators);
-    metadata.setQueryGranularity(Granularity.ALL);
+    metadata.setQueryGranularity(Granularities.ALL);
     metadata.setRollup(Boolean.FALSE);
 
     Metadata other = jsonMapper.readValue(
@@ -81,14 +81,14 @@ public class MetadataTest
     m1.put("k", "v");
     m1.setAggregators(aggs);
     m1.setTimestampSpec(new TimestampSpec("ds", "auto", null));
-    m1.setQueryGranularity(Granularity.ALL);
+    m1.setQueryGranularity(Granularities.ALL);
     m1.setRollup(Boolean.FALSE);
 
     Metadata m2 = new Metadata();
     m2.put("k", "v");
     m2.setAggregators(aggs);
     m2.setTimestampSpec(new TimestampSpec("ds", "auto", null));
-    m2.setQueryGranularity(Granularity.ALL);
+    m2.setQueryGranularity(Granularities.ALL);
     m2.setRollup(Boolean.FALSE);
 
     Metadata merged = new Metadata();
@@ -100,7 +100,7 @@ public class MetadataTest
     );
     merged.setTimestampSpec(new TimestampSpec("ds", "auto", null));
     merged.setRollup(Boolean.FALSE);
-    merged.setQueryGranularity(Granularity.ALL);
+    merged.setQueryGranularity(Granularities.ALL);
     Assert.assertEquals(merged, Metadata.merge(ImmutableList.of(m1, m2), null));
 
     //merge check with one metadata being null
@@ -127,7 +127,7 @@ public class MetadataTest
     );
 
     merged.setTimestampSpec(new TimestampSpec("ds", "auto", null));
-    merged.setQueryGranularity(Granularity.ALL);
+    merged.setQueryGranularity(Granularities.ALL);
     m1.setRollup(Boolean.TRUE);
     Assert.assertEquals(
         merged,
diff --git a/processing/src/test/java/io/druid/segment/SchemalessIndexTest.java b/processing/src/test/java/io/druid/segment/SchemalessIndexTest.java
index c303609a8d3..2d4fe6625ba 100644
--- a/processing/src/test/java/io/druid/segment/SchemalessIndexTest.java
+++ b/processing/src/test/java/io/druid/segment/SchemalessIndexTest.java
@@ -28,10 +28,10 @@ import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Ordering;
 import io.druid.data.input.MapBasedInputRow;
-import io.druid.java.util.common.granularity.Granularity;
 import io.druid.hll.HyperLogLogHash;
 import io.druid.jackson.DefaultObjectMapper;
 import io.druid.java.util.common.Pair;
+import io.druid.java.util.common.granularity.Granularities;
 import io.druid.java.util.common.logger.Logger;
 import io.druid.query.aggregation.AggregatorFactory;
 import io.druid.query.aggregation.CountAggregatorFactory;
@@ -141,7 +141,7 @@ public class SchemalessIndexTest
       final long timestamp = new DateTime(event.get(TIMESTAMP)).getMillis();
 
       if (theIndex == null) {
-        theIndex = new OnheapIncrementalIndex(timestamp, Granularity.MINUTE, METRIC_AGGS, 1000);
+        theIndex = new OnheapIncrementalIndex(timestamp, Granularities.MINUTE, METRIC_AGGS, 1000);
       }
 
       final List<String> dims = Lists.newArrayList();
@@ -351,7 +351,7 @@ public class SchemalessIndexTest
       }
 
       final IncrementalIndex rowIndex = new OnheapIncrementalIndex(
-          timestamp, Granularity.MINUTE, METRIC_AGGS, 1000
+          timestamp, Granularities.MINUTE, METRIC_AGGS, 1000
       );
 
       rowIndex.add(
@@ -381,7 +381,7 @@ public class SchemalessIndexTest
       log.info("Realtime loading index file[%s]", filename);
 
       final IncrementalIndex retVal = new OnheapIncrementalIndex(
-          new DateTime("2011-01-12T00:00:00.000Z").getMillis(), Granularity.MINUTE, aggs, 1000
+          new DateTime("2011-01-12T00:00:00.000Z").getMillis(), Granularities.MINUTE, aggs, 1000
       );
 
       try {
diff --git a/processing/src/test/java/io/druid/segment/SchemalessTestFullTest.java b/processing/src/test/java/io/druid/segment/SchemalessTestFullTest.java
index 12c7af6d4a3..03df1951f9c 100644
--- a/processing/src/test/java/io/druid/segment/SchemalessTestFullTest.java
+++ b/processing/src/test/java/io/druid/segment/SchemalessTestFullTest.java
@@ -23,6 +23,7 @@ import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Iterables;
 import com.google.common.collect.Lists;
 import io.druid.java.util.common.Pair;
+import io.druid.java.util.common.granularity.Granularities;
 import io.druid.java.util.common.granularity.Granularity;
 import io.druid.java.util.common.guava.Sequences;
 import io.druid.query.Druids;
@@ -69,7 +70,7 @@ public class SchemalessTestFullTest
   final double UNIQUES_1 = 1.0002442201269182d;
 
   final String dataSource = "testing";
-  final Granularity allGran = Granularity.ALL;
+  final Granularity allGran = Granularities.ALL;
   final String dimensionValue = "dimension";
   final String valueValue = "value";
   final String marketDimension = "market";
diff --git a/processing/src/test/java/io/druid/segment/SchemalessTestSimpleTest.java b/processing/src/test/java/io/druid/segment/SchemalessTestSimpleTest.java
index 4cd5c6ad820..9f950742129 100644
--- a/processing/src/test/java/io/druid/segment/SchemalessTestSimpleTest.java
+++ b/processing/src/test/java/io/druid/segment/SchemalessTestSimpleTest.java
@@ -22,6 +22,7 @@ package io.druid.segment;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Iterables;
 import com.google.common.collect.Lists;
+import io.druid.java.util.common.granularity.Granularities;
 import io.druid.java.util.common.granularity.Granularity;
 import io.druid.query.Druids;
 import io.druid.query.QueryRunner;
@@ -95,7 +96,7 @@ public class SchemalessTestSimpleTest
   }
 
   final String dataSource = "testing";
-  final Granularity allGran = Granularity.ALL;
+  final Granularity allGran = Granularities.ALL;
   final String dimensionValue = "dimension";
   final String valueValue = "value";
   final String marketDimension = "market";
diff --git a/processing/src/test/java/io/druid/segment/StringDimensionHandlerTest.java b/processing/src/test/java/io/druid/segment/StringDimensionHandlerTest.java
index ac9a295ecfa..a5e83b9bf05 100644
--- a/processing/src/test/java/io/druid/segment/StringDimensionHandlerTest.java
+++ b/processing/src/test/java/io/druid/segment/StringDimensionHandlerTest.java
@@ -22,8 +22,8 @@ package io.druid.segment;
 import com.google.common.collect.ImmutableMap;
 import io.druid.data.input.MapBasedInputRow;
 import io.druid.data.input.impl.DimensionsSpec;
-import io.druid.java.util.common.granularity.Granularity;
 import io.druid.java.util.common.Pair;
+import io.druid.java.util.common.granularity.Granularities;
 import io.druid.query.aggregation.AggregatorFactory;
 import io.druid.query.aggregation.CountAggregatorFactory;
 import io.druid.segment.data.CompressedObjectStrategy;
@@ -33,13 +33,13 @@ import io.druid.segment.data.Indexed;
 import io.druid.segment.incremental.IncrementalIndex;
 import io.druid.segment.incremental.IncrementalIndexAdapter;
 import io.druid.segment.incremental.OnheapIncrementalIndex;
-import java.util.Collections;
 import org.joda.time.Interval;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.ExpectedException;
 
 import java.util.Arrays;
+import java.util.Collections;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
@@ -65,7 +65,7 @@ public class StringDimensionHandlerTest
   ) throws Exception {
     IncrementalIndex incrementalIndex1 = new OnheapIncrementalIndex(
         TEST_INTERVAL.getStartMillis(),
-        Granularity.NONE,
+        Granularities.NONE,
         true,
         new DimensionsSpec(DimensionsSpec.getDefaultSchemas(dims), null, null),
         new AggregatorFactory[]{
@@ -78,7 +78,7 @@ public class StringDimensionHandlerTest
 
     IncrementalIndex incrementalIndex2 = new OnheapIncrementalIndex(
         TEST_INTERVAL.getStartMillis(),
-        Granularity.NONE,
+        Granularities.NONE,
         true,
         new DimensionsSpec(DimensionsSpec.getDefaultSchemas(dims), null, null),
         new AggregatorFactory[]{
diff --git a/processing/src/test/java/io/druid/segment/TestIndex.java b/processing/src/test/java/io/druid/segment/TestIndex.java
index 735fab47d5d..111ecb16fc9 100644
--- a/processing/src/test/java/io/druid/segment/TestIndex.java
+++ b/processing/src/test/java/io/druid/segment/TestIndex.java
@@ -28,8 +28,8 @@ import io.druid.data.input.impl.DelimitedParseSpec;
 import io.druid.data.input.impl.DimensionsSpec;
 import io.druid.data.input.impl.StringInputRowParser;
 import io.druid.data.input.impl.TimestampSpec;
-import io.druid.java.util.common.granularity.Granularity;
 import io.druid.hll.HyperLogLogHash;
+import io.druid.java.util.common.granularity.Granularities;
 import io.druid.java.util.common.logger.Logger;
 import io.druid.query.aggregation.AggregatorFactory;
 import io.druid.query.aggregation.DoubleMaxAggregatorFactory;
@@ -236,7 +236,7 @@ public class TestIndex
     final IncrementalIndexSchema schema = new IncrementalIndexSchema.Builder()
         .withMinTimestamp(new DateTime("2011-01-12T00:00:00.000Z").getMillis())
         .withTimestampSpec(new TimestampSpec("ds", "auto", null))
-        .withQueryGranularity(Granularity.NONE)
+        .withQueryGranularity(Granularities.NONE)
         .withVirtualColumns(VIRTUAL_COLUMNS)
         .withMetrics(METRIC_AGGS)
         .withRollup(rollup)
diff --git a/processing/src/test/java/io/druid/segment/data/IncrementalIndexTest.java b/processing/src/test/java/io/druid/segment/data/IncrementalIndexTest.java
index 75c27d5edf2..93c4ce0b136 100644
--- a/processing/src/test/java/io/druid/segment/data/IncrementalIndexTest.java
+++ b/processing/src/test/java/io/druid/segment/data/IncrementalIndexTest.java
@@ -34,7 +34,7 @@ import io.druid.collections.StupidPool;
 import io.druid.data.input.MapBasedInputRow;
 import io.druid.data.input.Row;
 import io.druid.data.input.impl.DimensionsSpec;
-import io.druid.java.util.common.granularity.Granularity;
+import io.druid.java.util.common.granularity.Granularities;
 import io.druid.java.util.common.guava.Accumulator;
 import io.druid.java.util.common.guava.Sequence;
 import io.druid.java.util.common.guava.Sequences;
@@ -134,7 +134,7 @@ public class IncrementalIndexTest
       public IncrementalIndex createIndex(AggregatorFactory[] factories)
       {
         return new OffheapIncrementalIndex(
-            0L, Granularity.NONE, factories, 1000000,
+            0L, Granularities.NONE, factories, 1000000,
             new StupidPool<ByteBuffer>(
                 "OffheapIncrementalIndex-bufferPool",
                 new Supplier<ByteBuffer>()
@@ -167,7 +167,7 @@ public class IncrementalIndexTest
       public IncrementalIndex createIndex(AggregatorFactory[] factories)
       {
         return new OffheapIncrementalIndex(
-            0L, Granularity.NONE, false, factories, 1000000,
+            0L, Granularities.NONE, false, factories, 1000000,
            new StupidPool<ByteBuffer>(
                "OffheapIncrementalIndex-bufferPool",
                new Supplier<ByteBuffer>()
@@ -207,7 +207,7 @@ public class IncrementalIndexTest
     }
 
     return new OnheapIncrementalIndex(
-        0L, Granularity.NONE, true, dimensionsSpec, aggregatorFactories, 1000000
+        0L, Granularities.NONE, true, dimensionsSpec, aggregatorFactories, 1000000
     );
   }
 
@@ -218,7 +218,7 @@ public class IncrementalIndexTest
     }
 
     return new OnheapIncrementalIndex(
-        0L, Granularity.NONE, true, null, aggregatorFactories, 1000000
+        0L, Granularities.NONE, true, null, aggregatorFactories, 1000000
     );
   }
 
@@ -229,7 +229,7 @@ public class IncrementalIndexTest
     }
 
     return new OnheapIncrementalIndex(
-        0L, Granularity.NONE, false, null, aggregatorFactories, 1000000
+        0L, Granularities.NONE, false, null, aggregatorFactories, 1000000
     );
   }
 
@@ -449,7 +449,7 @@ public class IncrementalIndexTest
 
     TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
                                   .dataSource("xxx")
-                                  .granularity(Granularity.ALL)
+                                  .granularity(Granularities.ALL)
                                   .intervals(ImmutableList.of(new Interval("2000/2030")))
                                   .aggregators(queryAggregatorFactories)
                                   .build();
@@ -600,7 +600,7 @@ public class IncrementalIndexTest
 
     final TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
                                         .dataSource("xxx")
-                                        .granularity(Granularity.ALL)
+                                        .granularity(Granularities.ALL)
                                         .intervals(ImmutableList.of(queryInterval))
                                         .aggregators(queryAggregatorFactories)
                                         .build();
@@ -678,7 +678,7 @@ public class IncrementalIndexTest
     );
     TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
                                   .dataSource("xxx")
-                                  .granularity(Granularity.ALL)
+                                  .granularity(Granularities.ALL)
                                   .intervals(ImmutableList.of(queryInterval))
                                   .aggregators(queryAggregatorFactories)
                                   .build();
@@ -758,7 +758,7 @@ public class IncrementalIndexTest
   public void testgetDimensions()
   {
     final IncrementalIndex incrementalIndex = new OnheapIncrementalIndex(
-        new IncrementalIndexSchema.Builder().withQueryGranularity(Granularity.NONE)
+        new IncrementalIndexSchema.Builder().withQueryGranularity(Granularities.NONE)
                                             .withMetrics(
                                                 new AggregatorFactory[]{
                                                     new CountAggregatorFactory(
@@ -786,7 +786,7 @@ public class IncrementalIndexTest
   public void testDynamicSchemaRollup() throws IndexSizeExceededException
   {
     IncrementalIndex index = new OnheapIncrementalIndex(
-        new IncrementalIndexSchema.Builder().withQueryGranularity(Granularity.NONE).build(),
+        new IncrementalIndexSchema.Builder().withQueryGranularity(Granularities.NONE).build(),
         true,
         10
     );
diff --git a/processing/src/test/java/io/druid/segment/filter/BaseFilterTest.java b/processing/src/test/java/io/druid/segment/filter/BaseFilterTest.java
index 8623e94e3a0..061cc960433 100644
--- a/processing/src/test/java/io/druid/segment/filter/BaseFilterTest.java
+++ b/processing/src/test/java/io/druid/segment/filter/BaseFilterTest.java
@@ -31,7 +31,7 @@ import io.druid.common.guava.SettableSupplier;
 import io.druid.common.utils.JodaUtils;
 import io.druid.data.input.InputRow;
 import io.druid.java.util.common.Pair;
-import io.druid.java.util.common.granularity.Granularity;
+import io.druid.java.util.common.granularity.Granularities;
 import io.druid.java.util.common.guava.Sequence;
 import io.druid.java.util.common.guava.Sequences;
 import io.druid.query.aggregation.Aggregator;
@@ -303,7 +303,7 @@ public abstract class BaseFilterTest
         filter,
         new Interval(JodaUtils.MIN_INSTANT, JodaUtils.MAX_INSTANT),
         VIRTUAL_COLUMNS,
-        Granularity.ALL,
+        Granularities.ALL,
         false
     );
   }
diff --git a/processing/src/test/java/io/druid/segment/filter/SpatialFilterBonusTest.java b/processing/src/test/java/io/druid/segment/filter/SpatialFilterBonusTest.java
index 0955e9a1fb4..8ac4fad7178 100644
--- a/processing/src/test/java/io/druid/segment/filter/SpatialFilterBonusTest.java
+++ b/processing/src/test/java/io/druid/segment/filter/SpatialFilterBonusTest.java
@@ -28,7 +28,7 @@ import io.druid.collections.spatial.search.RectangularBound;
 import io.druid.data.input.MapBasedInputRow;
 import io.druid.data.input.impl.DimensionsSpec;
 import io.druid.data.input.impl.SpatialDimensionSchema;
-import io.druid.java.util.common.granularity.Granularity;
+import io.druid.java.util.common.granularity.Granularities;
 import io.druid.query.Druids;
 import io.druid.query.FinalizeResultsQueryRunner;
 import io.druid.query.QueryRunner;
@@ -118,7 +118,7 @@ public class SpatialFilterBonusTest
   {
     IncrementalIndex theIndex = new OnheapIncrementalIndex(
         new IncrementalIndexSchema.Builder().withMinTimestamp(DATA_INTERVAL.getStartMillis())
-                                            .withQueryGranularity(Granularity.DAY)
+                                            .withQueryGranularity(Granularities.DAY)
                                             .withMetrics(METRIC_AGGS)
                                             .withDimensionsSpec(
                                                 new DimensionsSpec(
@@ -257,7 +257,7 @@ public class SpatialFilterBonusTest
     try {
       IncrementalIndex first = new OnheapIncrementalIndex(
          new IncrementalIndexSchema.Builder().withMinTimestamp(DATA_INTERVAL.getStartMillis())
-                                             .withQueryGranularity(Granularity.DAY)
+                                             .withQueryGranularity(Granularities.DAY)
                                              .withMetrics(METRIC_AGGS)
                                              .withDimensionsSpec(
                                                  new DimensionsSpec(
@@ -277,7 +277,7 @@ public class SpatialFilterBonusTest
      );
      IncrementalIndex second = new OnheapIncrementalIndex(
          new IncrementalIndexSchema.Builder().withMinTimestamp(DATA_INTERVAL.getStartMillis())
-                                             .withQueryGranularity(Granularity.DAY)
+                                             .withQueryGranularity(Granularities.DAY)
                                              .withMetrics(METRIC_AGGS)
                                              .withDimensionsSpec(
                                                  new DimensionsSpec(
@@ -296,7 +296,7 @@ public class SpatialFilterBonusTest
      );
      IncrementalIndex third = new OnheapIncrementalIndex(
          new IncrementalIndexSchema.Builder().withMinTimestamp(DATA_INTERVAL.getStartMillis())
-                                             .withQueryGranularity(Granularity.DAY)
+                                             .withQueryGranularity(Granularities.DAY)
                                              .withMetrics(METRIC_AGGS)
                                              .withDimensionsSpec(
                                                  new DimensionsSpec(
@@ -458,7 +458,7 @@ public class SpatialFilterBonusTest
   {
     TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
                                   .dataSource("test")
-                                  .granularity(Granularity.ALL)
+                                  .granularity(Granularities.ALL)
                                   .intervals(Arrays.asList(new Interval("2013-01-01/2013-01-07")))
                                   .filters(
                                       new SpatialDimFilter(
@@ -510,7 +510,7 @@ public class SpatialFilterBonusTest
   {
     TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
                                   .dataSource("test")
-                                  .granularity(Granularity.DAY)
+                                  .granularity(Granularities.DAY)
                                   .intervals(Arrays.asList(new Interval("2013-01-01/2013-01-07")))
                                   .filters(
                                       new SpatialDimFilter(
@@ -598,7 +598,7 @@ public class SpatialFilterBonusTest
   {
     TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
                                   .dataSource("test")
-                                  .granularity(Granularity.DAY)
+                                  .granularity(Granularities.DAY)
                                   .intervals(Arrays.asList(new Interval("2013-01-01/2013-01-07")))
                                   .aggregators(
                                       Arrays.asList(
diff --git a/processing/src/test/java/io/druid/segment/filter/SpatialFilterTest.java b/processing/src/test/java/io/druid/segment/filter/SpatialFilterTest.java
index 5562862bca1..f2fe2cd886e 100644
--- a/processing/src/test/java/io/druid/segment/filter/SpatialFilterTest.java
+++ b/processing/src/test/java/io/druid/segment/filter/SpatialFilterTest.java
@@ -28,7 +28,7 @@ import io.druid.collections.spatial.search.RectangularBound;
 import io.druid.data.input.MapBasedInputRow;
 import io.druid.data.input.impl.DimensionsSpec;
 import io.druid.data.input.impl.SpatialDimensionSchema;
-import io.druid.java.util.common.granularity.Granularity;
+import io.druid.java.util.common.granularity.Granularities;
 import io.druid.query.Druids;
 import io.druid.query.FinalizeResultsQueryRunner;
 import io.druid.query.QueryRunner;
@@ -111,7 +111,7 @@ public class SpatialFilterTest
   {
     IncrementalIndex theIndex = new OnheapIncrementalIndex(
         new IncrementalIndexSchema.Builder().withMinTimestamp(DATA_INTERVAL.getStartMillis())
-                                            .withQueryGranularity(Granularity.DAY)
+                                            .withQueryGranularity(Granularities.DAY)
                                             .withMetrics(METRIC_AGGS)
                                             .withDimensionsSpec(
                                                 new DimensionsSpec(
@@ -275,7 +275,7 @@ public class SpatialFilterTest
     try {
       IncrementalIndex first = new OnheapIncrementalIndex(
          new IncrementalIndexSchema.Builder().withMinTimestamp(DATA_INTERVAL.getStartMillis())
-                                             .withQueryGranularity(Granularity.DAY)
+                                             .withQueryGranularity(Granularities.DAY)
                                              .withMetrics(METRIC_AGGS)
                                              .withDimensionsSpec(
                                                  new DimensionsSpec(
@@ -299,7 +299,7 @@ public class SpatialFilterTest
      );
      IncrementalIndex second = new OnheapIncrementalIndex(
          new IncrementalIndexSchema.Builder().withMinTimestamp(DATA_INTERVAL.getStartMillis())
-                                             .withQueryGranularity(Granularity.DAY)
+                                             .withQueryGranularity(Granularities.DAY)
                                              .withMetrics(METRIC_AGGS)
                                              .withDimensionsSpec(
                                                  new DimensionsSpec(
@@ -323,7 +323,7 @@ public class SpatialFilterTest
      );
      IncrementalIndex third = new OnheapIncrementalIndex(
          new IncrementalIndexSchema.Builder().withMinTimestamp(DATA_INTERVAL.getStartMillis())
-                                             .withQueryGranularity(Granularity.DAY)
+                                             .withQueryGranularity(Granularities.DAY)
                                              .withMetrics(METRIC_AGGS)
                                              .withDimensionsSpec(
                                                  new DimensionsSpec(
@@ -519,7 +519,7 @@ public class SpatialFilterTest
   {
     TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
                                   .dataSource("test")
-                                  .granularity(Granularity.ALL)
+                                  .granularity(Granularities.ALL)
                                   .intervals(Arrays.asList(new Interval("2013-01-01/2013-01-07")))
                                   .filters(
                                       new SpatialDimFilter(
@@ -572,7 +572,7 @@ public class SpatialFilterTest
   {
     TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
                                   .dataSource("test")
-                                  .granularity(Granularity.ALL)
+                                  .granularity(Granularities.ALL)
                                   .intervals(Arrays.asList(new Interval("2013-01-01/2013-01-07")))
                                   .filters(
                                       new SpatialDimFilter(
@@ -624,7 +624,7 @@ public class SpatialFilterTest
   {
     TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
                                   .dataSource("test")
-                                  .granularity(Granularity.DAY)
+                                  .granularity(Granularities.DAY)
                                   .intervals(Arrays.asList(new Interval("2013-01-01/2013-01-07")))
                                   .filters(
                                       new SpatialDimFilter(
diff --git a/processing/src/test/java/io/druid/segment/incremental/IncrementalIndexMultiValueSpecTest.java b/processing/src/test/java/io/druid/segment/incremental/IncrementalIndexMultiValueSpecTest.java
index eae517daea4..b78914cfda1 100644
--- a/processing/src/test/java/io/druid/segment/incremental/IncrementalIndexMultiValueSpecTest.java
+++ b/processing/src/test/java/io/druid/segment/incremental/IncrementalIndexMultiValueSpecTest.java
@@ -26,7 +26,7 @@ import io.druid.data.input.impl.DimensionSchema;
 import io.druid.data.input.impl.DimensionsSpec;
 import io.druid.data.input.impl.StringDimensionSchema;
 import io.druid.data.input.impl.TimestampSpec;
-import io.druid.java.util.common.granularity.Granularity;
+import io.druid.java.util.common.granularity.Granularities;
 import io.druid.query.aggregation.AggregatorFactory;
 import io.druid.segment.VirtualColumns;
 import org.junit.Assert;
@@ -54,7 +54,7 @@ public class IncrementalIndexMultiValueSpecTest
     IncrementalIndexSchema schema = new IncrementalIndexSchema(
         0,
         new TimestampSpec("ds", "auto", null),
-        Granularity.ALL,
+        Granularities.ALL,
         VirtualColumns.EMPTY,
         dimensionsSpec,
         new AggregatorFactory[0],
diff --git a/processing/src/test/java/io/druid/segment/incremental/IncrementalIndexStorageAdapterTest.java b/processing/src/test/java/io/druid/segment/incremental/IncrementalIndexStorageAdapterTest.java
index 648e75d9668..4abff744fc7 100644
--- a/processing/src/test/java/io/druid/segment/incremental/IncrementalIndexStorageAdapterTest.java
+++ b/processing/src/test/java/io/druid/segment/incremental/IncrementalIndexStorageAdapterTest.java
@@ -29,7 +29,7 @@ import io.druid.collections.StupidPool;
 import io.druid.data.input.MapBasedInputRow;
 import io.druid.data.input.MapBasedRow;
 import io.druid.data.input.Row;
-import io.druid.java.util.common.granularity.Granularity;
+import io.druid.java.util.common.granularity.Granularities;
 import io.druid.java.util.common.guava.Sequence;
 import io.druid.java.util.common.guava.Sequences;
 import io.druid.js.JavaScriptConfig;
@@ -49,9 +49,9 @@ import io.druid.query.topn.TopNResultValue;
 import io.druid.segment.Cursor;
 import io.druid.segment.DimensionSelector;
 import io.druid.segment.StorageAdapter;
+import io.druid.segment.VirtualColumns;
 import io.druid.segment.data.IndexedInts;
 import io.druid.segment.filter.SelectorFilter;
-import io.druid.segment.VirtualColumns;
 import org.joda.time.DateTime;
 import org.joda.time.Interval;
 import org.junit.Assert;
@@ -97,7 +97,7 @@ public class IncrementalIndexStorageAdapterTest
       public IncrementalIndex createIndex()
       {
         return new OnheapIncrementalIndex(
-            0, Granularity.MINUTE, new AggregatorFactory[]{new CountAggregatorFactory("cnt")}, 1000
+            0, Granularities.MINUTE, new AggregatorFactory[]{new CountAggregatorFactory("cnt")}, 1000
         );
       }
@@ -130,7 +130,7 @@ public class IncrementalIndexStorageAdapterTest
     final Sequence<Row> rows = engine.process(
         GroupByQuery.builder()
                     .setDataSource("test")
-                    .setGranularity(Granularity.ALL)
+                    .setGranularity(Granularities.ALL)
                     .setInterval(new Interval(0, new DateTime().getMillis()))
                     .addDimension("billy")
                     .addDimension("sally")
@@ -177,7 +177,7 @@ public class IncrementalIndexStorageAdapterTest
     final Sequence<Row> rows = engine.process(
         GroupByQuery.builder()
                     .setDataSource("test")
-                    .setGranularity(Granularity.ALL)
+                    .setGranularity(Granularities.ALL)
                     .setInterval(new Interval(0, new DateTime().getMillis()))
                     .addDimension("billy")
.addDimension("sally") @@ -266,7 +266,7 @@ public class IncrementalIndexStorageAdapterTest new SelectorFilter("sally", "bo"), interval, VirtualColumns.EMPTY, - Granularity.NONE, + Granularities.NONE, descending ); @@ -322,7 +322,7 @@ public class IncrementalIndexStorageAdapterTest final Iterable> results = Sequences.toList( engine.query( new TopNQueryBuilder().dataSource("test") - .granularity(Granularity.ALL) + .granularity(Granularities.ALL) .intervals(Lists.newArrayList(new Interval(0, new DateTime().getMillis()))) .dimension("sally") .metric("cnt") @@ -369,7 +369,7 @@ public class IncrementalIndexStorageAdapterTest final Sequence rows = engine.process( GroupByQuery.builder() .setDataSource("test") - .setGranularity(Granularity.ALL) + .setGranularity(Granularities.ALL) .setInterval(new Interval(0, new DateTime().getMillis())) .addDimension("billy") .addDimension("sally") @@ -406,7 +406,7 @@ public class IncrementalIndexStorageAdapterTest final StorageAdapter sa = new IncrementalIndexStorageAdapter(index); Sequence cursors = sa.makeCursors( - null, new Interval(timestamp - 60_000, timestamp + 60_000), VirtualColumns.EMPTY, Granularity.ALL, false + null, new Interval(timestamp - 60_000, timestamp + 60_000), VirtualColumns.EMPTY, Granularities.ALL, false ); Sequences.toList( diff --git a/processing/src/test/java/io/druid/segment/incremental/IncrementalIndexTest.java b/processing/src/test/java/io/druid/segment/incremental/IncrementalIndexTest.java index 81f1f248d27..02e7a180e03 100644 --- a/processing/src/test/java/io/druid/segment/incremental/IncrementalIndexTest.java +++ b/processing/src/test/java/io/druid/segment/incremental/IncrementalIndexTest.java @@ -29,8 +29,8 @@ import io.druid.data.input.Row; import io.druid.data.input.impl.DimensionSchema; import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.StringDimensionSchema; -import io.druid.java.util.common.granularity.Granularity; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.granularity.Granularities; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.CountAggregatorFactory; import io.druid.query.aggregation.FilteredAggregatorFactory; @@ -88,7 +88,7 @@ public class IncrementalIndexTest }; final IncrementalIndexSchema schema = new IncrementalIndexSchema.Builder() .withMinTimestamp(0) - .withQueryGranularity(Granularity.MINUTE) + .withQueryGranularity(Granularities.MINUTE) .withDimensionsSpec(dimensions) .withMetrics(metrics) .withRollup(true) diff --git a/processing/src/test/java/io/druid/segment/incremental/OnheapIncrementalIndexBenchmark.java b/processing/src/test/java/io/druid/segment/incremental/OnheapIncrementalIndexBenchmark.java index 3027569e25a..3ff81c454d7 100644 --- a/processing/src/test/java/io/druid/segment/incremental/OnheapIncrementalIndexBenchmark.java +++ b/processing/src/test/java/io/druid/segment/incremental/OnheapIncrementalIndexBenchmark.java @@ -33,6 +33,7 @@ import com.google.common.util.concurrent.MoreExecutors; import com.google.common.util.concurrent.ThreadFactoryBuilder; import io.druid.data.input.InputRow; import io.druid.data.input.MapBasedInputRow; +import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.granularity.Granularity; import io.druid.java.util.common.guava.Sequences; import io.druid.java.util.common.parsers.ParseException; @@ -252,7 +253,7 @@ public class OnheapIncrementalIndexBenchmark extends AbstractBenchmark Granularity.class, AggregatorFactory[].class, Integer.TYPE - 
).newInstance(0, Granularity.NONE, factories, elementsPerThread * taskCount); + ).newInstance(0, Granularities.NONE, factories, elementsPerThread * taskCount); final ArrayList queryAggregatorFactories = new ArrayList<>(dimensionCount + 1); queryAggregatorFactories.add(new CountAggregatorFactory("rows")); for (int i = 0; i < dimensionCount; ++i) { @@ -339,7 +340,7 @@ public class OnheapIncrementalIndexBenchmark extends AbstractBenchmark ); TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() .dataSource("xxx") - .granularity(Granularity.ALL) + .granularity(Granularities.ALL) .intervals(ImmutableList.of(queryInterval)) .aggregators(queryAggregatorFactories) .build(); @@ -376,7 +377,7 @@ public class OnheapIncrementalIndexBenchmark extends AbstractBenchmark ); TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() .dataSource("xxx") - .granularity(Granularity.ALL) + .granularity(Granularities.ALL) .intervals(ImmutableList.of(queryInterval)) .aggregators(queryAggregatorFactories) .build(); diff --git a/processing/src/test/java/io/druid/segment/incremental/OnheapIncrementalIndexTest.java b/processing/src/test/java/io/druid/segment/incremental/OnheapIncrementalIndexTest.java index 95ddc0a51b2..10b30bde40d 100644 --- a/processing/src/test/java/io/druid/segment/incremental/OnheapIncrementalIndexTest.java +++ b/processing/src/test/java/io/druid/segment/incremental/OnheapIncrementalIndexTest.java @@ -22,7 +22,7 @@ package io.druid.segment.incremental; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import io.druid.data.input.MapBasedInputRow; -import io.druid.java.util.common.granularity.Granularity; +import io.druid.java.util.common.granularity.Granularities; import io.druid.query.aggregation.Aggregator; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.LongMaxAggregator; @@ -45,7 +45,7 @@ public class OnheapIncrementalIndexTest { final OnheapIncrementalIndex index = new OnheapIncrementalIndex( 0, - Granularity.MINUTE, + Granularities.MINUTE, new AggregatorFactory[]{new LongMaxAggregatorFactory("max", "max")}, MAX_ROWS ); @@ -111,7 +111,7 @@ public class OnheapIncrementalIndexTest final OnheapIncrementalIndex index = new OnheapIncrementalIndex( 0, - Granularity.MINUTE, + Granularities.MINUTE, new AggregatorFactory[]{new LongMaxAggregatorFactory("max", "max")}, MAX_ROWS ); diff --git a/processing/src/test/java/io/druid/segment/incremental/TimeAndDimsCompTest.java b/processing/src/test/java/io/druid/segment/incremental/TimeAndDimsCompTest.java index 6db93156e3f..474986cd937 100644 --- a/processing/src/test/java/io/druid/segment/incremental/TimeAndDimsCompTest.java +++ b/processing/src/test/java/io/druid/segment/incremental/TimeAndDimsCompTest.java @@ -22,7 +22,7 @@ package io.druid.segment.incremental; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import io.druid.data.input.MapBasedInputRow; -import io.druid.java.util.common.granularity.Granularity; +import io.druid.java.util.common.granularity.Granularities; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.CountAggregatorFactory; import org.junit.Assert; @@ -42,7 +42,7 @@ public class TimeAndDimsCompTest public void testBasic() throws IndexSizeExceededException { IncrementalIndex index = new OnheapIncrementalIndex( - 0, Granularity.NONE, new AggregatorFactory[]{new CountAggregatorFactory("cnt")}, 1000 + 0, Granularities.NONE, new AggregatorFactory[]{new CountAggregatorFactory("cnt")}, 1000 ); 
     long time = System.currentTimeMillis();
diff --git a/server/src/main/java/io/druid/segment/indexing/granularity/ArbitraryGranularitySpec.java b/server/src/main/java/io/druid/segment/indexing/granularity/ArbitraryGranularitySpec.java
index dbd439ee143..beced264b15 100644
--- a/server/src/main/java/io/druid/segment/indexing/granularity/ArbitraryGranularitySpec.java
+++ b/server/src/main/java/io/druid/segment/indexing/granularity/ArbitraryGranularitySpec.java
@@ -28,6 +28,7 @@ import com.google.common.collect.Lists;
 import com.google.common.collect.PeekingIterator;
 import com.google.common.collect.Sets;
 import io.druid.common.utils.JodaUtils;
+import io.druid.java.util.common.granularity.Granularities;
 import io.druid.java.util.common.granularity.Granularity;
 import io.druid.java.util.common.guava.Comparators;
 import org.joda.time.DateTime;
@@ -50,7 +51,7 @@
       @JsonProperty("intervals") List<Interval> inputIntervals
   )
   {
-    this.queryGranularity = queryGranularity == null ? Granularity.NONE : queryGranularity;
+    this.queryGranularity = queryGranularity == null ? Granularities.NONE : queryGranularity;
     this.rollup = rollup == null ? Boolean.TRUE : rollup;

     this.intervals = Sets.newTreeSet(Comparators.intervalsByStartThenEnd());
diff --git a/server/src/main/java/io/druid/segment/indexing/granularity/UniformGranularitySpec.java b/server/src/main/java/io/druid/segment/indexing/granularity/UniformGranularitySpec.java
index f4bf6521b92..610329eaa7d 100644
--- a/server/src/main/java/io/druid/segment/indexing/granularity/UniformGranularitySpec.java
+++ b/server/src/main/java/io/druid/segment/indexing/granularity/UniformGranularitySpec.java
@@ -25,9 +25,8 @@
 import com.google.common.base.Optional;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Iterables;
 import com.google.common.collect.Lists;
-
+import io.druid.java.util.common.granularity.Granularities;
 import io.druid.java.util.common.granularity.Granularity;
-
 import org.joda.time.DateTime;
 import org.joda.time.Interval;
@@ -36,8 +35,8 @@ import java.util.SortedSet;

 public class UniformGranularitySpec implements GranularitySpec
 {
-  private static final Granularity DEFAULT_SEGMENT_GRANULARITY = Granularity.DAY;
-  private static final Granularity DEFAULT_QUERY_GRANULARITY = Granularity.NONE;
+  private static final Granularity DEFAULT_SEGMENT_GRANULARITY = Granularities.DAY;
+  private static final Granularity DEFAULT_QUERY_GRANULARITY = Granularities.NONE;

   private final Granularity segmentGranularity;
   private final Granularity queryGranularity;
diff --git a/server/src/test/java/io/druid/client/CachingClusteredClientTest.java b/server/src/test/java/io/druid/client/CachingClusteredClientTest.java
index bd2a3469774..bf403d221ef 100644
--- a/server/src/test/java/io/druid/client/CachingClusteredClientTest.java
+++ b/server/src/test/java/io/druid/client/CachingClusteredClientTest.java
@@ -55,6 +55,7 @@ import io.druid.hll.HyperLogLogCollector;
 import io.druid.jackson.DefaultObjectMapper;
 import io.druid.java.util.common.ISE;
 import io.druid.java.util.common.Pair;
+import io.druid.java.util.common.granularity.Granularities;
 import io.druid.java.util.common.granularity.Granularity;
 import io.druid.java.util.common.granularity.PeriodGranularity;
 import io.druid.java.util.common.guava.FunctionalIterable;
@@ -242,7 +243,7 @@ public class CachingClusteredClientTest
   );
   private static final DimFilter DIM_FILTER = null;
   private static final List<PostAggregator> RENAMED_POST_AGGS =
ImmutableList.of(); - private static final Granularity GRANULARITY = Granularity.DAY; + private static final Granularity GRANULARITY = Granularities.DAY; private static final DateTimeZone TIMEZONE = DateTimeZone.forID("America/Los_Angeles"); private static final Granularity PT1H_TZ_GRANULARITY = new PeriodGranularity(new Period("PT1H"), null, TIMEZONE); private static final String TOP_DIM = "a_dim"; diff --git a/server/src/test/java/io/druid/client/CachingQueryRunnerTest.java b/server/src/test/java/io/druid/client/CachingQueryRunnerTest.java index 614e02ad141..af57a5e97ac 100644 --- a/server/src/test/java/io/druid/client/CachingQueryRunnerTest.java +++ b/server/src/test/java/io/druid/client/CachingQueryRunnerTest.java @@ -30,9 +30,9 @@ import io.druid.client.cache.Cache; import io.druid.client.cache.CacheConfig; import io.druid.client.cache.CacheStats; import io.druid.client.cache.MapCache; -import io.druid.java.util.common.granularity.Granularity; import io.druid.jackson.DefaultObjectMapper; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.SequenceWrapper; import io.druid.java.util.common.guava.Sequences; @@ -122,7 +122,7 @@ public class CachingQueryRunnerTest .threshold(3) .intervals("2011-01-05/2011-01-10") .aggregators(AGGS) - .granularity(Granularity.ALL); + .granularity(Granularities.ALL); QueryToolChest toolchest = new TopNQueryQueryToolChest( new TopNQueryConfig(), diff --git a/server/src/test/java/io/druid/segment/indexing/DataSchemaTest.java b/server/src/test/java/io/druid/segment/indexing/DataSchemaTest.java index 8abbc04f78a..ae3d259f1eb 100644 --- a/server/src/test/java/io/druid/segment/indexing/DataSchemaTest.java +++ b/server/src/test/java/io/druid/segment/indexing/DataSchemaTest.java @@ -24,15 +24,14 @@ import com.fasterxml.jackson.databind.InjectableValues; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; - import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.JSONParseSpec; import io.druid.data.input.impl.StringInputRowParser; import io.druid.data.input.impl.TimestampSpec; -import io.druid.java.util.common.granularity.DurationGranularity; -import io.druid.java.util.common.granularity.Granularity; import io.druid.jackson.DefaultObjectMapper; import io.druid.java.util.common.IAE; +import io.druid.java.util.common.granularity.DurationGranularity; +import io.druid.java.util.common.granularity.Granularities; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.DoubleSumAggregatorFactory; import io.druid.segment.indexing.granularity.ArbitraryGranularitySpec; @@ -75,7 +74,7 @@ public class DataSchemaTest new DoubleSumAggregatorFactory("metric1", "col1"), new DoubleSumAggregatorFactory("metric2", "col2"), }, - new ArbitraryGranularitySpec(Granularity.DAY, ImmutableList.of(Interval.parse("2014/2015"))), + new ArbitraryGranularitySpec(Granularities.DAY, ImmutableList.of(Interval.parse("2014/2015"))), jsonMapper ); @@ -107,7 +106,7 @@ public class DataSchemaTest new DoubleSumAggregatorFactory("metric1", "col1"), new DoubleSumAggregatorFactory("metric2", "col2"), }, - new ArbitraryGranularitySpec(Granularity.DAY, ImmutableList.of(Interval.parse("2014/2015"))), + new ArbitraryGranularitySpec(Granularities.DAY, ImmutableList.of(Interval.parse("2014/2015"))), jsonMapper ); @@ -139,7 +138,7 @@ 
public class DataSchemaTest new DoubleSumAggregatorFactory("metric1", "col1"), new DoubleSumAggregatorFactory("metric2", "col2"), }, - new ArbitraryGranularitySpec(Granularity.DAY, ImmutableList.of(Interval.parse("2014/2015"))), + new ArbitraryGranularitySpec(Granularities.DAY, ImmutableList.of(Interval.parse("2014/2015"))), jsonMapper ); schema.getParser(); @@ -168,7 +167,7 @@ public class DataSchemaTest new DoubleSumAggregatorFactory("metric2", "col2"), new DoubleSumAggregatorFactory("metric1", "col3"), }, - new ArbitraryGranularitySpec(Granularity.DAY, ImmutableList.of(Interval.parse("2014/2015"))), + new ArbitraryGranularitySpec(Granularities.DAY, ImmutableList.of(Interval.parse("2014/2015"))), jsonMapper ); schema.getParser(); diff --git a/server/src/test/java/io/druid/segment/indexing/granularity/ArbitraryGranularityTest.java b/server/src/test/java/io/druid/segment/indexing/granularity/ArbitraryGranularityTest.java index 372436495cb..a53698ca121 100644 --- a/server/src/test/java/io/druid/segment/indexing/granularity/ArbitraryGranularityTest.java +++ b/server/src/test/java/io/druid/segment/indexing/granularity/ArbitraryGranularityTest.java @@ -23,8 +23,8 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Optional; import com.google.common.base.Throwables; import com.google.common.collect.Lists; -import io.druid.java.util.common.granularity.Granularity; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.granularity.Granularities; import org.joda.time.DateTime; import org.joda.time.Interval; import org.junit.Assert; @@ -55,7 +55,7 @@ public class ArbitraryGranularityTest public void testSimple() { final GranularitySpec spec = new ArbitraryGranularitySpec( - Granularity.NONE, + Granularities.NONE, Lists.newArrayList( new Interval("2012-01-08T00Z/2012-01-11T00Z"), new Interval("2012-02-01T00Z/2012-03-01T00Z"), @@ -131,7 +131,7 @@ public class ArbitraryGranularityTest boolean thrown = false; try { - final GranularitySpec spec = new ArbitraryGranularitySpec(Granularity.NONE, intervals); + final GranularitySpec spec = new ArbitraryGranularitySpec(Granularities.NONE, intervals); } catch(IllegalArgumentException e) { thrown = true; } @@ -149,7 +149,7 @@ public class ArbitraryGranularityTest new Interval("2012-01-03T00Z/2012-01-04T00Z"), new Interval("2012-01-01T00Z/2012-01-03T00Z") ); - final GranularitySpec spec = new ArbitraryGranularitySpec(Granularity.NONE, false, intervals); + final GranularitySpec spec = new ArbitraryGranularitySpec(Granularities.NONE, false, intervals); Assert.assertFalse(spec.isRollup()); } @@ -164,7 +164,7 @@ public class ArbitraryGranularityTest boolean thrown = false; try { - final GranularitySpec spec = new ArbitraryGranularitySpec(Granularity.NONE, intervals); + final GranularitySpec spec = new ArbitraryGranularitySpec(Granularities.NONE, intervals); } catch(IllegalArgumentException e) { thrown = true; } @@ -175,7 +175,7 @@ public class ArbitraryGranularityTest @Test public void testJson() { - final GranularitySpec spec = new ArbitraryGranularitySpec(Granularity.NONE, Lists.newArrayList( + final GranularitySpec spec = new ArbitraryGranularitySpec(Granularities.NONE, Lists.newArrayList( new Interval("2012-01-08T00Z/2012-01-11T00Z"), new Interval("2012-02-01T00Z/2012-03-01T00Z"), new Interval("2012-01-07T00Z/2012-01-08T00Z"), diff --git a/server/src/test/java/io/druid/segment/indexing/granularity/UniformGranularityTest.java 
b/server/src/test/java/io/druid/segment/indexing/granularity/UniformGranularityTest.java index 6230a9ed5c4..289fea66b93 100644 --- a/server/src/test/java/io/druid/segment/indexing/granularity/UniformGranularityTest.java +++ b/server/src/test/java/io/druid/segment/indexing/granularity/UniformGranularityTest.java @@ -23,11 +23,9 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Optional; import com.google.common.base.Throwables; import com.google.common.collect.Lists; - -import io.druid.java.util.common.granularity.Granularity; -import io.druid.java.util.common.granularity.PeriodGranularity; import io.druid.jackson.DefaultObjectMapper; - +import io.druid.java.util.common.granularity.Granularities; +import io.druid.java.util.common.granularity.PeriodGranularity; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import org.joda.time.Interval; @@ -48,7 +46,7 @@ public class UniformGranularityTest public void testSimple() { final GranularitySpec spec = new UniformGranularitySpec( - Granularity.DAY, + Granularities.DAY, null, Lists.newArrayList( new Interval("2012-01-08T00Z/2012-01-11T00Z"), @@ -113,7 +111,7 @@ public class UniformGranularityTest new Interval("2012-01-03T00Z/2012-01-04T00Z"), new Interval("2012-01-01T00Z/2012-01-03T00Z") ); - final GranularitySpec spec = new UniformGranularitySpec(Granularity.DAY, Granularity.NONE, false, intervals); + final GranularitySpec spec = new UniformGranularitySpec(Granularities.DAY, Granularities.NONE, false, intervals); Assert.assertFalse(spec.isRollup()); } @@ -122,7 +120,7 @@ public class UniformGranularityTest public void testJson() { final GranularitySpec spec = new UniformGranularitySpec( - Granularity.DAY, + Granularities.DAY, null, Lists.newArrayList( new Interval("2012-01-08T00Z/2012-01-11T00Z"), @@ -155,7 +153,7 @@ public class UniformGranularityTest { final GranularitySpec spec = new UniformGranularitySpec( - Granularity.DAY, + Granularities.DAY, null, Lists.newArrayList( new Interval("2012-01-08T00Z/2012-01-11T00Z"), @@ -167,7 +165,7 @@ public class UniformGranularityTest equalsCheck( spec, new UniformGranularitySpec( - Granularity.DAY, + Granularities.DAY, null, Lists.newArrayList( new Interval("2012-01-08T00Z/2012-01-11T00Z"), @@ -188,7 +186,7 @@ public class UniformGranularityTest public void testNotEquals() { final GranularitySpec spec = new UniformGranularitySpec( - Granularity.DAY, + Granularities.DAY, null, Lists.newArrayList( new Interval("2012-01-08T00Z/2012-01-11T00Z"), @@ -200,7 +198,7 @@ public class UniformGranularityTest notEqualsCheck( spec, new UniformGranularitySpec( - Granularity.YEAR, + Granularities.YEAR, null, Lists.newArrayList( new Interval("2012-01-08T00Z/2012-01-11T00Z"), @@ -212,7 +210,7 @@ public class UniformGranularityTest ); notEqualsCheck( spec, new UniformGranularitySpec( - Granularity.DAY, + Granularities.DAY, null, Lists.newArrayList( new Interval("2012-01-08T00Z/2012-01-12T00Z"), @@ -224,8 +222,8 @@ public class UniformGranularityTest ); notEqualsCheck( spec, new UniformGranularitySpec( - Granularity.DAY, - Granularity.ALL, + Granularities.DAY, + Granularities.ALL, Lists.newArrayList( new Interval("2012-01-08T00Z/2012-01-11T00Z"), new Interval("2012-01-07T00Z/2012-01-08T00Z"), diff --git a/server/src/test/java/io/druid/segment/realtime/FireDepartmentTest.java b/server/src/test/java/io/druid/segment/realtime/FireDepartmentTest.java index 66a65b25bb1..943fbcbf119 100644 --- a/server/src/test/java/io/druid/segment/realtime/FireDepartmentTest.java +++ 
b/server/src/test/java/io/druid/segment/realtime/FireDepartmentTest.java @@ -21,15 +21,14 @@ package io.druid.segment.realtime; import com.fasterxml.jackson.databind.InjectableValues; import com.fasterxml.jackson.databind.ObjectMapper; - import io.druid.client.cache.CacheConfig; import io.druid.client.cache.MapCache; import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.JSONParseSpec; import io.druid.data.input.impl.StringInputRowParser; import io.druid.data.input.impl.TimestampSpec; -import io.druid.java.util.common.granularity.Granularity; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.granularity.Granularities; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.CountAggregatorFactory; import io.druid.segment.TestHelper; @@ -97,7 +96,7 @@ public class FireDepartmentTest new AggregatorFactory[]{ new CountAggregatorFactory("count") }, - new UniformGranularitySpec(Granularity.HOUR, Granularity.MINUTE, null), + new UniformGranularitySpec(Granularities.HOUR, Granularities.MINUTE, null), jsonMapper ), new RealtimeIOConfig( diff --git a/server/src/test/java/io/druid/segment/realtime/RealtimeManagerTest.java b/server/src/test/java/io/druid/segment/realtime/RealtimeManagerTest.java index af271223606..14634225310 100644 --- a/server/src/test/java/io/druid/segment/realtime/RealtimeManagerTest.java +++ b/server/src/test/java/io/druid/segment/realtime/RealtimeManagerTest.java @@ -28,7 +28,6 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.util.concurrent.MoreExecutors; - import io.druid.data.input.Committer; import io.druid.data.input.Firehose; import io.druid.data.input.FirehoseFactory; @@ -37,9 +36,9 @@ import io.druid.data.input.FirehoseV2; import io.druid.data.input.InputRow; import io.druid.data.input.Row; import io.druid.data.input.impl.InputRowParser; -import io.druid.java.util.common.granularity.Granularity; import io.druid.jackson.DefaultObjectMapper; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.parsers.ParseException; import io.druid.query.BaseQuery; import io.druid.query.Query; @@ -130,14 +129,14 @@ public class RealtimeManagerTest "test", null, new AggregatorFactory[]{new CountAggregatorFactory("rows")}, - new UniformGranularitySpec(Granularity.HOUR, Granularity.NONE, null), + new UniformGranularitySpec(Granularities.HOUR, Granularities.NONE, null), jsonMapper ); schema2 = new DataSchema( "testV2", null, new AggregatorFactory[]{new CountAggregatorFactory("rows")}, - new UniformGranularitySpec(Granularity.HOUR, Granularity.NONE, null), + new UniformGranularitySpec(Granularities.HOUR, Granularities.NONE, null), jsonMapper ); RealtimeIOConfig ioConfig = new RealtimeIOConfig( @@ -275,7 +274,7 @@ public class RealtimeManagerTest "testing", null, new AggregatorFactory[]{new CountAggregatorFactory("ignore")}, - new UniformGranularitySpec(Granularity.HOUR, Granularity.NONE, null), + new UniformGranularitySpec(Granularities.HOUR, Granularities.NONE, null), jsonMapper ); diff --git a/server/src/test/java/io/druid/segment/realtime/appenderator/AppenderatorTest.java b/server/src/test/java/io/druid/segment/realtime/appenderator/AppenderatorTest.java index 1ece97cb48a..8fb6619dd83 100644 --- a/server/src/test/java/io/druid/segment/realtime/appenderator/AppenderatorTest.java +++ 
b/server/src/test/java/io/druid/segment/realtime/appenderator/AppenderatorTest.java @@ -25,11 +25,10 @@ import com.google.common.base.Suppliers; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; - import io.druid.data.input.Committer; import io.druid.data.input.InputRow; import io.druid.data.input.MapBasedInputRow; -import io.druid.java.util.common.granularity.Granularity; +import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Sequences; import io.druid.query.Druids; import io.druid.query.Result; @@ -266,7 +265,7 @@ public class AppenderatorTest new LongSumAggregatorFactory("met", "met") ) ) - .granularity(Granularity.DAY) + .granularity(Granularities.DAY) .build(); final List> results1 = Lists.newArrayList(); @@ -292,7 +291,7 @@ public class AppenderatorTest new LongSumAggregatorFactory("met", "met") ) ) - .granularity(Granularity.DAY) + .granularity(Granularities.DAY) .build(); final List> results2 = Lists.newArrayList(); @@ -322,7 +321,7 @@ public class AppenderatorTest new LongSumAggregatorFactory("met", "met") ) ) - .granularity(Granularity.DAY) + .granularity(Granularities.DAY) .build(); final List> results3 = Lists.newArrayList(); @@ -356,7 +355,7 @@ public class AppenderatorTest new LongSumAggregatorFactory("met", "met") ) ) - .granularity(Granularity.DAY) + .granularity(Granularities.DAY) .build(); final List> results4 = Lists.newArrayList(); @@ -401,7 +400,7 @@ public class AppenderatorTest new LongSumAggregatorFactory("met", "met") ) ) - .granularity(Granularity.DAY) + .granularity(Granularities.DAY) .intervals( new MultipleSpecificSegmentSpec( ImmutableList.of( @@ -437,7 +436,7 @@ public class AppenderatorTest new LongSumAggregatorFactory("met", "met") ) ) - .granularity(Granularity.DAY) + .granularity(Granularities.DAY) .intervals( new MultipleSpecificSegmentSpec( ImmutableList.of( @@ -473,7 +472,7 @@ public class AppenderatorTest new LongSumAggregatorFactory("met", "met") ) ) - .granularity(Granularity.DAY) + .granularity(Granularities.DAY) .intervals( new MultipleSpecificSegmentSpec( ImmutableList.of( diff --git a/server/src/test/java/io/druid/segment/realtime/appenderator/AppenderatorTester.java b/server/src/test/java/io/druid/segment/realtime/appenderator/AppenderatorTester.java index 6abae24e9de..6fdbd906161 100644 --- a/server/src/test/java/io/druid/segment/realtime/appenderator/AppenderatorTester.java +++ b/server/src/test/java/io/druid/segment/realtime/appenderator/AppenderatorTester.java @@ -32,8 +32,8 @@ import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.JSONParseSpec; import io.druid.data.input.impl.MapInputRowParser; import io.druid.data.input.impl.TimestampSpec; -import io.druid.java.util.common.granularity.Granularity; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.granularity.Granularities; import io.druid.query.DefaultQueryRunnerFactoryConglomerate; import io.druid.query.IntervalChunkingQueryRunnerDecorator; import io.druid.query.Query; @@ -116,7 +116,7 @@ public class AppenderatorTester implements AutoCloseable new CountAggregatorFactory("count"), new LongSumAggregatorFactory("met", "met") }, - new UniformGranularitySpec(Granularity.MINUTE, Granularity.NONE, null), + new UniformGranularitySpec(Granularities.MINUTE, Granularities.NONE, null), objectMapper ); diff --git a/server/src/test/java/io/druid/segment/realtime/appenderator/DefaultOfflineAppenderatorFactoryTest.java 
b/server/src/test/java/io/druid/segment/realtime/appenderator/DefaultOfflineAppenderatorFactoryTest.java index e14aaa3bc58..657c3251c89 100644 --- a/server/src/test/java/io/druid/segment/realtime/appenderator/DefaultOfflineAppenderatorFactoryTest.java +++ b/server/src/test/java/io/druid/segment/realtime/appenderator/DefaultOfflineAppenderatorFactoryTest.java @@ -30,9 +30,9 @@ import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.JSONParseSpec; import io.druid.data.input.impl.MapInputRowParser; import io.druid.data.input.impl.TimestampSpec; -import io.druid.java.util.common.granularity.Granularity; import io.druid.guice.GuiceInjectors; import io.druid.initialization.Initialization; +import io.druid.java.util.common.granularity.Granularities; import io.druid.query.DruidProcessingConfig; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.CountAggregatorFactory; @@ -125,7 +125,7 @@ public class DefaultOfflineAppenderatorFactoryTest new CountAggregatorFactory("count"), new LongSumAggregatorFactory("met", "met") }, - new UniformGranularitySpec(Granularity.MINUTE, Granularity.NONE, null), + new UniformGranularitySpec(Granularities.MINUTE, Granularities.NONE, null), objectMapper ); diff --git a/server/src/test/java/io/druid/segment/realtime/appenderator/FiniteAppenderatorDriverTest.java b/server/src/test/java/io/druid/segment/realtime/appenderator/FiniteAppenderatorDriverTest.java index 8a070177550..acf98ef3884 100644 --- a/server/src/test/java/io/druid/segment/realtime/appenderator/FiniteAppenderatorDriverTest.java +++ b/server/src/test/java/io/druid/segment/realtime/appenderator/FiniteAppenderatorDriverTest.java @@ -29,12 +29,12 @@ import com.google.common.collect.Iterables; import com.google.common.collect.Maps; import com.google.common.collect.Ordering; import com.google.common.collect.Sets; - import io.druid.data.input.Committer; import io.druid.data.input.InputRow; import io.druid.data.input.MapBasedInputRow; -import io.druid.java.util.common.granularity.Granularity; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.granularity.Granularities; +import io.druid.java.util.common.granularity.Granularity; import io.druid.query.SegmentDescriptor; import io.druid.segment.realtime.FireDepartmentMetrics; import io.druid.segment.realtime.plumber.SegmentHandoffNotifier; @@ -95,7 +95,7 @@ public class FiniteAppenderatorDriverTest public void setUp() { appenderatorTester = new AppenderatorTester(MAX_ROWS_IN_MEMORY); - allocator = new TestSegmentAllocator(DATA_SOURCE, Granularity.HOUR); + allocator = new TestSegmentAllocator(DATA_SOURCE, Granularities.HOUR); driver = new FiniteAppenderatorDriver( appenderatorTester.getAppenderator(), allocator, diff --git a/server/src/test/java/io/druid/segment/realtime/firehose/IngestSegmentFirehoseTest.java b/server/src/test/java/io/druid/segment/realtime/firehose/IngestSegmentFirehoseTest.java index b4bcd31569f..6bdaf279407 100644 --- a/server/src/test/java/io/druid/segment/realtime/firehose/IngestSegmentFirehoseTest.java +++ b/server/src/test/java/io/druid/segment/realtime/firehose/IngestSegmentFirehoseTest.java @@ -26,7 +26,7 @@ import io.druid.data.input.impl.CSVParseSpec; import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.StringInputRowParser; import io.druid.data.input.impl.TimestampSpec; -import io.druid.java.util.common.granularity.Granularity; +import io.druid.java.util.common.granularity.Granularities; import 
io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.LongSumAggregatorFactory; import io.druid.segment.IndexIO; @@ -73,7 +73,7 @@ public class IngestSegmentFirehoseTest ImmutableList.of("host"), ImmutableList.of("visited_sum", "unique_hosts"), null, - Granularity.NONE + Granularities.NONE ); int count = 0; @@ -120,7 +120,7 @@ public class IngestSegmentFirehoseTest IncrementalIndex index = null; try { - index = new OnheapIncrementalIndex(0, Granularity.NONE, aggregators, true, true, true, 5000); + index = new OnheapIncrementalIndex(0, Granularities.NONE, aggregators, true, true, true, 5000); for (String line : rows) { index.add(parser.parse(line)); } diff --git a/server/src/test/java/io/druid/segment/realtime/plumber/RealtimePlumberSchoolTest.java b/server/src/test/java/io/druid/segment/realtime/plumber/RealtimePlumberSchoolTest.java index 2b0c2c046b3..15457e1c64c 100644 --- a/server/src/test/java/io/druid/segment/realtime/plumber/RealtimePlumberSchoolTest.java +++ b/server/src/test/java/io/druid/segment/realtime/plumber/RealtimePlumberSchoolTest.java @@ -35,8 +35,8 @@ import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.JSONParseSpec; import io.druid.data.input.impl.StringInputRowParser; import io.druid.data.input.impl.TimestampSpec; -import io.druid.java.util.common.granularity.Granularity; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.granularity.Granularities; import io.druid.query.DefaultQueryRunnerFactoryConglomerate; import io.druid.query.Query; import io.druid.query.QueryRunnerFactory; @@ -143,7 +143,7 @@ public class RealtimePlumberSchoolTest Map.class ), new AggregatorFactory[]{new CountAggregatorFactory("rows")}, - new UniformGranularitySpec(Granularity.HOUR, Granularity.NONE, null), + new UniformGranularitySpec(Granularities.HOUR, Granularities.NONE, null), jsonMapper ); @@ -162,7 +162,7 @@ public class RealtimePlumberSchoolTest Map.class ), new AggregatorFactory[]{new CountAggregatorFactory("rows")}, - new UniformGranularitySpec(Granularity.YEAR, Granularity.NONE, null), + new UniformGranularitySpec(Granularities.YEAR, Granularities.NONE, null), jsonMapper ); diff --git a/server/src/test/java/io/druid/segment/realtime/plumber/SinkTest.java b/server/src/test/java/io/druid/segment/realtime/plumber/SinkTest.java index a4f4b61f1ba..eaba35314fb 100644 --- a/server/src/test/java/io/druid/segment/realtime/plumber/SinkTest.java +++ b/server/src/test/java/io/druid/segment/realtime/plumber/SinkTest.java @@ -21,11 +21,10 @@ package io.druid.segment.realtime.plumber; import com.google.common.collect.Iterators; import com.google.common.collect.Lists; - import io.druid.data.input.InputRow; import io.druid.data.input.Row; -import io.druid.java.util.common.granularity.Granularity; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.granularity.Granularities; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.CountAggregatorFactory; import io.druid.segment.indexing.DataSchema; @@ -51,7 +50,7 @@ public class SinkTest "test", null, new AggregatorFactory[]{new CountAggregatorFactory("rows")}, - new UniformGranularitySpec(Granularity.HOUR, Granularity.MINUTE, null), + new UniformGranularitySpec(Granularities.HOUR, Granularities.MINUTE, null), new DefaultObjectMapper() ); diff --git a/server/src/test/java/io/druid/server/coordination/ServerManagerTest.java b/server/src/test/java/io/druid/server/coordination/ServerManagerTest.java index 
b0f817f4336..7018d998025 100644 --- a/server/src/test/java/io/druid/server/coordination/ServerManagerTest.java +++ b/server/src/test/java/io/druid/server/coordination/ServerManagerTest.java @@ -29,14 +29,14 @@ import com.google.common.collect.Lists; import com.google.common.util.concurrent.MoreExecutors; import com.metamx.emitter.EmittingLogger; import com.metamx.emitter.service.ServiceMetricEvent; - import io.druid.client.cache.CacheConfig; import io.druid.client.cache.LocalCacheProvider; -import io.druid.java.util.common.granularity.Granularity; import io.druid.jackson.DefaultObjectMapper; import io.druid.java.util.common.IAE; import io.druid.java.util.common.MapUtils; import io.druid.java.util.common.Pair; +import io.druid.java.util.common.granularity.Granularities; +import io.druid.java.util.common.granularity.Granularity; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; import io.druid.java.util.common.guava.Yielder; @@ -65,7 +65,6 @@ import io.druid.segment.loading.SegmentLoadingException; import io.druid.server.metrics.NoopServiceEmitter; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; - import org.joda.time.Interval; import org.junit.Assert; import org.junit.Before; @@ -170,7 +169,7 @@ public class ServerManagerTest public void testSimpleGet() { Future future = assertQueryable( - Granularity.DAY, + Granularities.DAY, "test", new Interval("P1d/2011-04-01"), ImmutableList.>of( @@ -181,7 +180,7 @@ public class ServerManagerTest future = assertQueryable( - Granularity.DAY, + Granularities.DAY, "test", new Interval("P2d/2011-04-02"), ImmutableList.>of( new Pair("1", new Interval("P1d/2011-04-01")), @@ -198,7 +197,7 @@ public class ServerManagerTest final Interval interval = new Interval("2011-04-01/2011-04-02"); Future future = assertQueryable( - Granularity.DAY, + Granularities.DAY, dataSouce, interval, ImmutableList.>of( new Pair("2", interval) @@ -208,7 +207,7 @@ public class ServerManagerTest dropQueryable(dataSouce, "2", interval); future = assertQueryable( - Granularity.DAY, + Granularities.DAY, dataSouce, interval, ImmutableList.>of( new Pair("1", interval) @@ -223,7 +222,7 @@ public class ServerManagerTest loadQueryable("test", "3", new Interval("2011-04-04/2011-04-05")); Future future = assertQueryable( - Granularity.DAY, + Granularities.DAY, "test", new Interval("2011-04-04/2011-04-06"), ImmutableList.>of( new Pair("3", new Interval("2011-04-04/2011-04-05")) @@ -235,7 +234,7 @@ public class ServerManagerTest dropQueryable("test", "1", new Interval("2011-04-04/2011-04-05")); future = assertQueryable( - Granularity.HOUR, + Granularities.HOUR, "test", new Interval("2011-04-04/2011-04-04T06"), ImmutableList.>of( new Pair("2", new Interval("2011-04-04T00/2011-04-04T01")), @@ -248,7 +247,7 @@ public class ServerManagerTest waitForTestVerificationAndCleanup(future); future = assertQueryable( - Granularity.HOUR, + Granularities.HOUR, "test", new Interval("2011-04-04/2011-04-04T03"), ImmutableList.>of( new Pair("2", new Interval("2011-04-04T00/2011-04-04T01")), @@ -259,7 +258,7 @@ public class ServerManagerTest waitForTestVerificationAndCleanup(future); future = assertQueryable( - Granularity.HOUR, + Granularities.HOUR, "test", new Interval("2011-04-04T04/2011-04-04T06"), ImmutableList.>of( new Pair("2", new Interval("2011-04-04T04/2011-04-04T05")), @@ -275,7 +274,7 @@ public class ServerManagerTest loadQueryable("test", "3", new Interval("2011-04-04/2011-04-05")); Future future = assertQueryable( 
- Granularity.DAY, + Granularities.DAY, "test", new Interval("2011-04-04/2011-04-06"), ImmutableList.>of( new Pair("3", new Interval("2011-04-04/2011-04-05")) @@ -314,7 +313,7 @@ public class ServerManagerTest loadQueryable("test", "3", new Interval("2011-04-04/2011-04-05")); Future future = assertQueryable( - Granularity.DAY, + Granularities.DAY, "test", new Interval("2011-04-04/2011-04-06"), ImmutableList.>of( new Pair("3", new Interval("2011-04-04/2011-04-05")) @@ -357,7 +356,7 @@ public class ServerManagerTest loadQueryable("test", "3", new Interval("2011-04-04/2011-04-05")); Future future = assertQueryable( - Granularity.DAY, + Granularities.DAY, "test", new Interval("2011-04-04/2011-04-06"), ImmutableList.>of( new Pair("3", new Interval("2011-04-04/2011-04-05")) diff --git a/services/src/main/java/io/druid/cli/DumpSegment.java b/services/src/main/java/io/druid/cli/DumpSegment.java index 97c14307c7e..fa14574cfca 100644 --- a/services/src/main/java/io/druid/cli/DumpSegment.java +++ b/services/src/main/java/io/druid/cli/DumpSegment.java @@ -35,17 +35,16 @@ import com.google.inject.Injector; import com.google.inject.Key; import com.google.inject.Module; import com.google.inject.name.Names; +import io.airlift.airline.Command; +import io.airlift.airline.Option; import io.druid.collections.bitmap.BitmapFactory; import io.druid.collections.bitmap.ConciseBitmapFactory; import io.druid.collections.bitmap.ImmutableBitmap; import io.druid.collections.bitmap.RoaringBitmapFactory; - -import io.airlift.airline.Command; -import io.airlift.airline.Option; -import io.druid.java.util.common.granularity.Granularity; import io.druid.guice.annotations.Json; import io.druid.java.util.common.IAE; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Accumulator; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; @@ -253,7 +252,7 @@ public class DumpSegment extends GuiceRunnable Filters.toFilter(filter), index.getDataInterval().withChronology(ISOChronology.getInstanceUTC()), VirtualColumns.EMPTY, - Granularity.ALL, + Granularities.ALL, false ); diff --git a/services/src/test/java/io/druid/cli/validate/DruidJsonValidatorTest.java b/services/src/test/java/io/druid/cli/validate/DruidJsonValidatorTest.java index 88cf9ad1224..556646a5054 100644 --- a/services/src/test/java/io/druid/cli/validate/DruidJsonValidatorTest.java +++ b/services/src/test/java/io/druid/cli/validate/DruidJsonValidatorTest.java @@ -28,7 +28,7 @@ import io.druid.guice.GuiceInjectors; import io.druid.indexing.common.task.RealtimeIndexTask; import io.druid.indexing.common.task.TaskResource; import io.druid.jackson.DefaultObjectMapper; -import io.druid.java.util.common.granularity.Granularity; +import io.druid.java.util.common.granularity.Granularities; import io.druid.query.aggregation.AggregatorFactory; import io.druid.segment.IndexSpec; import io.druid.segment.indexing.DataSchema; @@ -149,7 +149,7 @@ public class DruidJsonValidatorTest "foo", null, new AggregatorFactory[0], - new UniformGranularitySpec(Granularity.HOUR, Granularity.NONE, null), + new UniformGranularitySpec(Granularities.HOUR, Granularities.NONE, null), jsonMapper ), new RealtimeIOConfig( diff --git a/sql/src/main/java/io/druid/sql/calcite/rel/DruidQueryBuilder.java b/sql/src/main/java/io/druid/sql/calcite/rel/DruidQueryBuilder.java index 26fac4652e5..da0f72c45d1 100644 --- a/sql/src/main/java/io/druid/sql/calcite/rel/DruidQueryBuilder.java +++ 
b/sql/src/main/java/io/druid/sql/calcite/rel/DruidQueryBuilder.java @@ -25,6 +25,7 @@ import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.granularity.Granularity; import io.druid.query.DataSource; import io.druid.query.dimension.DimensionSpec; @@ -299,7 +300,7 @@ public class DruidQueryBuilder final List dimensions = grouping.getDimensions(); if (dimensions.isEmpty()) { - queryGranularity = Granularity.ALL; + queryGranularity = Granularities.ALL; } else if (dimensions.size() == 1) { final DimensionSpec dimensionSpec = Iterables.getOnlyElement(dimensions); final Granularity gran = ExtractionFns.toQueryGranularity(dimensionSpec.getExtractionFn()); @@ -421,7 +422,7 @@ public class DruidQueryBuilder limitSpec.getLimit(), filtration.getQuerySegmentSpec(), filtration.getDimFilter(), - Granularity.ALL, + Granularities.ALL, grouping.getAggregatorFactories(), grouping.getPostAggregators(), context @@ -454,7 +455,7 @@ public class DruidQueryBuilder filtration.getQuerySegmentSpec(), VirtualColumns.EMPTY, filtration.getDimFilter(), - Granularity.ALL, + Granularities.ALL, grouping.getDimensions(), grouping.getAggregatorFactories(), grouping.getPostAggregators(), @@ -506,7 +507,7 @@ public class DruidQueryBuilder filtration.getQuerySegmentSpec(), descending, filtration.getDimFilter(), - Granularity.ALL, + Granularities.ALL, selectProjection != null ? selectProjection.getDimensions() : ImmutableList.of(), selectProjection != null ? selectProjection.getMetrics() : ImmutableList.of(), null, diff --git a/sql/src/test/java/io/druid/sql/calcite/CalciteQueryTest.java b/sql/src/test/java/io/druid/sql/calcite/CalciteQueryTest.java index 7ee5eefd3c9..cc406ea60a7 100644 --- a/sql/src/test/java/io/druid/sql/calcite/CalciteQueryTest.java +++ b/sql/src/test/java/io/druid/sql/calcite/CalciteQueryTest.java @@ -23,7 +23,7 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import io.druid.hll.HLLCV1; -import io.druid.java.util.common.granularity.Granularity; +import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.granularity.PeriodGranularity; import io.druid.java.util.common.guava.Sequences; import io.druid.java.util.common.logger.Logger; @@ -314,14 +314,14 @@ public class CalciteQueryTest Druids.newSelectQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) .intervals(QSS(Filtration.eternity())) - .granularity(Granularity.ALL) + .granularity(Granularities.ALL) .pagingSpec(FIRST_PAGING_SPEC) .context(QUERY_CONTEXT_DEFAULT) .build(), Druids.newSelectQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) .intervals(QSS(Filtration.eternity())) - .granularity(Granularity.ALL) + .granularity(Granularities.ALL) .pagingSpec( new PagingSpec( ImmutableMap.of("foo_1970-01-01T00:00:00.000Z_2001-01-03T00:00:00.001Z_1", 5), @@ -352,7 +352,7 @@ public class CalciteQueryTest Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) .intervals(QSS(Filtration.eternity())) - .granularity(Granularity.ALL) + .granularity(Granularities.ALL) .aggregators(AGGS(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build() @@ -386,7 +386,7 @@ public class CalciteQueryTest Druids.newSelectQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) .intervals(QSS(Filtration.eternity())) - 
.granularity(Granularity.ALL) + .granularity(Granularities.ALL) .pagingSpec(FIRST_PAGING_SPEC) .context(QUERY_CONTEXT_DEFAULT) .build() @@ -407,7 +407,7 @@ public class CalciteQueryTest Druids.newSelectQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) .intervals(QSS(Filtration.eternity())) - .granularity(Granularity.ALL) + .granularity(Granularities.ALL) .descending(true) .pagingSpec(FIRST_PAGING_SPEC) .context(QUERY_CONTEXT_DEFAULT) @@ -433,7 +433,7 @@ public class CalciteQueryTest new DefaultDimensionSpec("dim2", "d1"), new DefaultDimensionSpec("dim2", "d2") )) - .granularity(Granularity.ALL) + .granularity(Granularities.ALL) .descending(false) .pagingSpec(FIRST_PAGING_SPEC) .context(QUERY_CONTEXT_DEFAULT) @@ -456,7 +456,7 @@ public class CalciteQueryTest .dataSource(CalciteTests.DATASOURCE1) .intervals(QSS(Filtration.eternity())) .dimensionSpecs(DIMS(new DefaultDimensionSpec("dim1", "d1"))) - .granularity(Granularity.ALL) + .granularity(Granularities.ALL) .descending(true) .pagingSpec(FIRST_PAGING_SPEC) .context(QUERY_CONTEXT_DEFAULT) @@ -480,7 +480,7 @@ public class CalciteQueryTest .setDataSource(CalciteTests.DATASOURCE1) .setInterval(QSS(Filtration.eternity())) .setDimensions(DIMS(new DefaultDimensionSpec("dim1", "d0"))) - .setGranularity(Granularity.ALL) + .setGranularity(Granularities.ALL) .setLimitSpec( new DefaultLimitSpec( ImmutableList.of( @@ -521,14 +521,14 @@ public class CalciteQueryTest Druids.newSelectQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) .intervals(QSS(Filtration.eternity())) - .granularity(Granularity.ALL) + .granularity(Granularities.ALL) .pagingSpec(FIRST_PAGING_SPEC) .context(QUERY_CONTEXT_DEFAULT) .build(), Druids.newSelectQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) .intervals(QSS(Filtration.eternity())) - .granularity(Granularity.ALL) + .granularity(Granularities.ALL) .pagingSpec( new PagingSpec( ImmutableMap.of("foo_1970-01-01T00:00:00.000Z_2001-01-03T00:00:00.001Z_1", 5), @@ -541,7 +541,7 @@ public class CalciteQueryTest Druids.newSelectQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) .intervals(QSS(Filtration.eternity())) - .granularity(Granularity.ALL) + .granularity(Granularities.ALL) .filters(NOT(SELECTOR("dim1", "", null))) .pagingSpec(FIRST_PAGING_SPEC) .context(QUERY_CONTEXT_DEFAULT) @@ -549,7 +549,7 @@ public class CalciteQueryTest Druids.newSelectQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) .intervals(QSS(Filtration.eternity())) - .granularity(Granularity.ALL) + .granularity(Granularities.ALL) .filters(NOT(SELECTOR("dim1", "", null))) .pagingSpec( new PagingSpec( @@ -599,7 +599,7 @@ public class CalciteQueryTest GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) .setInterval(QSS(Filtration.eternity())) - .setGranularity(Granularity.ALL) + .setGranularity(Granularities.ALL) .setDimensions(DIMS(new DefaultDimensionSpec("cnt", "d0", ValueType.LONG))) .setAggregatorSpecs(AGGS(new CountAggregatorFactory("a0"))) .setContext(QUERY_CONTEXT_DEFAULT) @@ -620,7 +620,7 @@ public class CalciteQueryTest GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) .setInterval(QSS(Filtration.eternity())) - .setGranularity(Granularity.ALL) + .setGranularity(Granularities.ALL) .setDimensions(DIMS(new DefaultDimensionSpec("m1", "d0", ValueType.FLOAT))) .setAggregatorSpecs(AGGS(new CountAggregatorFactory("a0"))) .setContext(QUERY_CONTEXT_DEFAULT) @@ -646,7 +646,7 @@ public class CalciteQueryTest Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) .intervals(QSS(Filtration.eternity())) - 
.granularity(Granularity.ALL) + .granularity(Granularities.ALL) .aggregators(AGGS(new CountAggregatorFactory("a0"))) .filters(SELECTOR("m1", "1.0", null)) .context(TIMESERIES_CONTEXT_DEFAULT) @@ -667,7 +667,7 @@ public class CalciteQueryTest GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) .setInterval(QSS(Filtration.eternity())) - .setGranularity(Granularity.ALL) + .setGranularity(Granularities.ALL) .setDimensions(DIMS(new DefaultDimensionSpec("dim1", "d0"))) .setAggregatorSpecs(AGGS(new DoubleSumAggregatorFactory("a0", "m1"))) .setHavingSpec( @@ -751,7 +751,7 @@ public class CalciteQueryTest Druids.newSelectQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) .intervals(QSS(Filtration.eternity())) - .granularity(Granularity.ALL) + .granularity(Granularities.ALL) .pagingSpec(FIRST_PAGING_SPEC) .filters( OR( @@ -764,7 +764,7 @@ public class CalciteQueryTest Druids.newSelectQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) .intervals(QSS(Filtration.eternity())) - .granularity(Granularity.ALL) + .granularity(Granularities.ALL) .pagingSpec( new PagingSpec( ImmutableMap.of("foo_1970-01-01T00:00:00.000Z_2001-01-03T00:00:00.001Z_1", 2), @@ -799,7 +799,7 @@ public class CalciteQueryTest Druids.newSelectQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) .intervals(QSS(Filtration.eternity())) - .granularity(Granularity.ALL) + .granularity(Granularities.ALL) .pagingSpec(new PagingSpec(null, 2, true)) .filters( OR( @@ -812,7 +812,7 @@ public class CalciteQueryTest Druids.newSelectQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) .intervals(QSS(Filtration.eternity())) - .granularity(Granularity.ALL) + .granularity(Granularities.ALL) .pagingSpec( new PagingSpec( ImmutableMap.of("foo_1970-01-01T00:00:00.000Z_2001-01-03T00:00:00.001Z_1", 1), @@ -831,7 +831,7 @@ public class CalciteQueryTest Druids.newSelectQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) .intervals(QSS(Filtration.eternity())) - .granularity(Granularity.ALL) + .granularity(Granularities.ALL) .pagingSpec( new PagingSpec( ImmutableMap.of("foo_1970-01-01T00:00:00.000Z_2001-01-03T00:00:00.001Z_1", 2), @@ -891,7 +891,7 @@ public class CalciteQueryTest .dataSource(CalciteTests.DATASOURCE1) .intervals(QSS(Filtration.eternity())) .filters(SELECTOR("dim1", "foobar", null)) - .granularity(Granularity.ALL) + .granularity(Granularities.ALL) .aggregators(AGGS( new CountAggregatorFactory("a0"), new LongMaxAggregatorFactory("a1", "cnt") @@ -913,7 +913,7 @@ public class CalciteQueryTest .dataSource(CalciteTests.DATASOURCE1) .intervals(QSS(Filtration.eternity())) .filters(SELECTOR("dim1", "foobar", null)) - .granularity(Granularity.ALL) + .granularity(Granularities.ALL) .aggregators(AGGS( new CountAggregatorFactory("a0"), new LongMaxAggregatorFactory("a1", "cnt") @@ -934,7 +934,7 @@ public class CalciteQueryTest Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) .intervals(QSS(Filtration.eternity())) - .granularity(Granularity.ALL) + .granularity(Granularities.ALL) .aggregators(AGGS(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build() @@ -954,7 +954,7 @@ public class CalciteQueryTest Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) .intervals(QSS(Filtration.eternity())) - .granularity(Granularity.ALL) + .granularity(Granularities.ALL) .filters( OR( new LikeDimFilter("dim1", "a%", null, null), @@ -980,7 +980,7 @@ public class CalciteQueryTest Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) .intervals(QSS(Filtration.eternity())) - 
.granularity(Granularity.ALL) + .granularity(Granularities.ALL) .filters( OR( BOUND("cnt", "3", null, false, false, null, StringComparators.NUMERIC), @@ -1006,7 +1006,7 @@ public class CalciteQueryTest Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) .intervals(QSS(Filtration.eternity())) - .granularity(Granularity.ALL) + .granularity(Granularities.ALL) .filters(IN("cnt", ImmutableList.of("1", "2"), null)) .aggregators(AGGS(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) @@ -1029,7 +1029,7 @@ public class CalciteQueryTest GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) .setInterval(QSS(Filtration.eternity())) - .setGranularity(Granularity.ALL) + .setGranularity(Granularities.ALL) .setDimensions(DIMS(new DefaultDimensionSpec("dim1", "d0"))) .setDimFilter( OR( @@ -1058,7 +1058,7 @@ public class CalciteQueryTest Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) .intervals(QSS(Filtration.eternity())) - .granularity(Granularity.ALL) + .granularity(Granularities.ALL) .aggregators( AGGS( new CountAggregatorFactory("a0"), @@ -1116,7 +1116,7 @@ public class CalciteQueryTest new TopNQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) .intervals(QSS(Filtration.eternity())) - .granularity(Granularity.ALL) + .granularity(Granularities.ALL) .dimension(new DefaultDimensionSpec("dim1", "d0")) .metric(new InvertedTopNMetricSpec(new NumericTopNMetricSpec("a2"))) .aggregators(AGGS( @@ -1159,7 +1159,7 @@ public class CalciteQueryTest GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) .setInterval(QSS(Filtration.eternity())) - .setGranularity(Granularity.ALL) + .setGranularity(Granularities.ALL) .setDimensions(DIMS(new DefaultDimensionSpec("dim1", "d0"))) .setAggregatorSpecs( ImmutableList.of( @@ -1214,7 +1214,7 @@ public class CalciteQueryTest GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) .setInterval(QSS(Filtration.eternity())) - .setGranularity(Granularity.ALL) + .setGranularity(Granularities.ALL) .setDimensions(DIMS(new DefaultDimensionSpec("dim1", "d0"))) .setAggregatorSpecs( ImmutableList.of( @@ -1275,7 +1275,7 @@ public class CalciteQueryTest Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) .intervals(QSS(Filtration.eternity())) - .granularity(Granularity.ALL) + .granularity(Granularities.ALL) .aggregators(AGGS( new FilteredAggregatorFactory( new LongSumAggregatorFactory("a0", "cnt"), @@ -1331,7 +1331,7 @@ public class CalciteQueryTest Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) .intervals(QSS(Filtration.eternity())) - .granularity(Granularity.ALL) + .granularity(Granularities.ALL) .aggregators(AGGS( new LongSumAggregatorFactory("a0", null, "(\"cnt\" * 3)"), new LongSumAggregatorFactory("a1", "cnt", null), @@ -1358,7 +1358,7 @@ public class CalciteQueryTest GroupByQuery.builder() .setDataSource(CalciteTests.DATASOURCE1) .setInterval(QSS(Filtration.eternity())) - .setGranularity(Granularity.ALL) + .setGranularity(Granularities.ALL) .setDimensions(DIMS(new DefaultDimensionSpec("dim1", "d0"))) .setDimFilter(new InDimFilter("dim1", ImmutableList.of("abc", "def", "ghi"), null)) .setAggregatorSpecs( @@ -1385,7 +1385,7 @@ public class CalciteQueryTest Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) .intervals(QSS(Filtration.eternity())) - .granularity(Granularity.ALL) + .granularity(Granularities.ALL) .filters(SELECTOR("dim2", "a", null)) .aggregators(AGGS(new CountAggregatorFactory("a0"))) 
.context(TIMESERIES_CONTEXT_DEFAULT) @@ -1406,7 +1406,7 @@ public class CalciteQueryTest Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) .intervals(QSS()) - .granularity(Granularity.ALL) + .granularity(Granularities.ALL) .filters(null) .aggregators(AGGS(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) @@ -1425,7 +1425,7 @@ public class CalciteQueryTest Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) .intervals(QSS(Filtration.eternity())) - .granularity(Granularity.ALL) + .granularity(Granularities.ALL) .filters(BOUND("dim1", "a", "b", false, true, null, StringComparators.LEXICOGRAPHIC)) .aggregators(AGGS(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) @@ -1446,7 +1446,7 @@ public class CalciteQueryTest Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) .intervals(QSS(Filtration.eternity())) - .granularity(Granularity.ALL) + .granularity(Granularities.ALL) .filters(SELECTOR("dim1", "abc", null)) .aggregators(AGGS(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) @@ -1467,7 +1467,7 @@ public class CalciteQueryTest Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) .intervals(QSS(Filtration.eternity())) - .granularity(Granularity.ALL) + .granularity(Granularities.ALL) .filters(NUMERIC_SELECTOR("dim1", "2", null)) .aggregators(AGGS(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) @@ -1489,7 +1489,7 @@ public class CalciteQueryTest Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) .intervals(QSS(new Interval("2000-01-01/2001-01-01"))) - .granularity(Granularity.ALL) + .granularity(Granularities.ALL) .aggregators(AGGS(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build() @@ -1509,7 +1509,7 @@ public class CalciteQueryTest Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) .intervals(QSS(new Interval("2000-01-01/2000-01-01T00:00:00.001"))) - .granularity(Granularity.ALL) + .granularity(Granularities.ALL) .aggregators(AGGS(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build() @@ -1535,7 +1535,7 @@ public class CalciteQueryTest new Interval("2000-01-02/2000-01-02T00:00:00.001") ) ) - .granularity(Granularity.ALL) + .granularity(Granularities.ALL) .aggregators(AGGS(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build() @@ -1563,7 +1563,7 @@ public class CalciteQueryTest Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) .intervals(QSS(new Interval("2000/2001"), new Interval("2002-05-01/2003-05-01"))) - .granularity(Granularity.ALL) + .granularity(Granularities.ALL) .filters( AND( SELECTOR("dim2", "a", null), @@ -1616,7 +1616,7 @@ public class CalciteQueryTest ) ) ) - .granularity(Granularity.ALL) + .granularity(Granularities.ALL) .aggregators(AGGS(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build() @@ -1646,7 +1646,7 @@ public class CalciteQueryTest ) ) .filters(NOT(SELECTOR("dim1", "xxx", null))) - .granularity(Granularity.ALL) + .granularity(Granularities.ALL) .aggregators(AGGS(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) .build() @@ -1669,7 +1669,7 @@ public class CalciteQueryTest .dataSource(CalciteTests.DATASOURCE1) .intervals(QSS(new Interval("2000-01-01/2001-01-01"))) .filters(NOT(SELECTOR("dim2", "a", null))) - .granularity(Granularity.ALL) + .granularity(Granularities.ALL) .aggregators(AGGS(new 
CountAggregatorFactory("a0")))
             .context(TIMESERIES_CONTEXT_DEFAULT)
             .build()
@@ -1705,7 +1705,7 @@ public class CalciteQueryTest
                     )
                 )
             )
-            .granularity(Granularity.ALL)
+            .granularity(Granularities.ALL)
             .aggregators(AGGS(new CountAggregatorFactory("a0")))
             .context(TIMESERIES_CONTEXT_DEFAULT)
             .build()
@@ -1727,7 +1727,7 @@ public class CalciteQueryTest
         Druids.newTimeseriesQueryBuilder()
             .dataSource(CalciteTests.DATASOURCE1)
             .intervals(QSS(Filtration.eternity()))
-            .granularity(Granularity.ALL)
+            .granularity(Granularities.ALL)
             .filters(
                 BOUND(
                     "cnt",
@@ -1758,7 +1758,7 @@ public class CalciteQueryTest
         GroupByQuery.builder()
             .setDataSource(CalciteTests.DATASOURCE1)
             .setInterval(QSS(Filtration.eternity()))
-            .setGranularity(Granularity.ALL)
+            .setGranularity(Granularities.ALL)
             .setDimensions(DIMS(new DefaultDimensionSpec("dim1", "d0")))
             .setDimFilter(
                 OR(
@@ -1794,7 +1794,7 @@ public class CalciteQueryTest
         GroupByQuery.builder()
             .setDataSource(CalciteTests.DATASOURCE1)
             .setInterval(QSS(Filtration.eternity()))
-            .setGranularity(Granularity.ALL)
+            .setGranularity(Granularities.ALL)
             .setDimensions(DIMS(new DefaultDimensionSpec("dim1", "d0")))
             .setDimFilter(
                 OR(
@@ -1823,7 +1823,7 @@ public class CalciteQueryTest
         new TopNQueryBuilder()
             .dataSource(CalciteTests.DATASOURCE1)
             .intervals(QSS(Filtration.eternity()))
-            .granularity(Granularity.ALL)
+            .granularity(Granularities.ALL)
             .dimension(new DefaultDimensionSpec("dim2", "d0"))
             .metric(new DimensionTopNMetricSpec(null, StringComparators.LEXICOGRAPHIC))
             .threshold(10)
@@ -1847,7 +1847,7 @@ public class CalciteQueryTest
         Druids.newTimeseriesQueryBuilder()
             .dataSource(CalciteTests.DATASOURCE1)
             .intervals(QSS(Filtration.eternity()))
-            .granularity(Granularity.ALL)
+            .granularity(Granularities.ALL)
             .aggregators(
                 AGGS(
                     new LongSumAggregatorFactory("a0", "cnt"),
@@ -1883,7 +1883,7 @@ public class CalciteQueryTest
         Druids.newTimeseriesQueryBuilder()
             .dataSource(CalciteTests.DATASOURCE1)
             .intervals(QSS(Filtration.eternity()))
-            .granularity(Granularity.ALL)
+            .granularity(Granularities.ALL)
             .aggregators(
                 AGGS(
                     new LongSumAggregatorFactory("a0", "cnt"),
@@ -1937,7 +1937,7 @@ public class CalciteQueryTest
         GroupByQuery.builder()
             .setDataSource(CalciteTests.DATASOURCE1)
             .setInterval(QSS(Filtration.eternity()))
-            .setGranularity(Granularity.ALL)
+            .setGranularity(Granularities.ALL)
             .setDimensions(DIMS(
                 new DefaultDimensionSpec("dim1", "d0"),
                 new DefaultDimensionSpec("dim2", "d1")
@@ -1947,14 +1947,14 @@ public class CalciteQueryTest
                     .build()
             )
             .setInterval(QSS(Filtration.eternity()))
-            .setGranularity(Granularity.ALL)
+            .setGranularity(Granularities.ALL)
             .setDimensions(DIMS(new DefaultDimensionSpec("d1", "d0")))
             .setAggregatorSpecs(AGGS(new LongSumAggregatorFactory("a0", "a0")))
             .setContext(QUERY_CONTEXT_DEFAULT)
             .build()
         )
         .setInterval(QSS(Filtration.eternity()))
-        .setGranularity(Granularity.ALL)
+        .setGranularity(Granularities.ALL)
         .setAggregatorSpecs(AGGS(
             new LongSumAggregatorFactory("a0", "a0"),
             new CountAggregatorFactory("a1")
@@ -1984,7 +1984,7 @@ public class CalciteQueryTest
         GroupByQuery.builder()
             .setDataSource(CalciteTests.DATASOURCE1)
             .setInterval(QSS(Filtration.eternity()))
-            .setGranularity(Granularity.ALL)
+            .setGranularity(Granularities.ALL)
             .setDimensions(DIMS(new DefaultDimensionSpec("dim2", "d0")))
             .setAggregatorSpecs(AGGS(new LongSumAggregatorFactory("a0", "cnt")))
             .setContext(QUERY_CONTEXT_DEFAULT)
@@ -1992,7 +1992,7 @@ public class CalciteQueryTest
                 )
         )
         .setInterval(QSS(Filtration.eternity()))
-        .setGranularity(Granularity.ALL)
+        .setGranularity(Granularities.ALL)
        .setAggregatorSpecs(AGGS(
            new LongSumAggregatorFactory("a0", "a0"),
            new CountAggregatorFactory("a1")
@@ -2028,7 +2028,7 @@ public class CalciteQueryTest
         new TopNQueryBuilder()
             .dataSource(CalciteTests.DATASOURCE1)
             .intervals(QSS(Filtration.eternity()))
-            .granularity(Granularity.ALL)
+            .granularity(Granularities.ALL)
             .dimension(new DefaultDimensionSpec("dim2", "d0"))
             .aggregators(AGGS(new LongSumAggregatorFactory("a0", "cnt")))
             .metric(new NumericTopNMetricSpec("a0"))
@@ -2038,7 +2038,7 @@ public class CalciteQueryTest
         GroupByQuery.builder()
             .setDataSource(CalciteTests.DATASOURCE1)
             .setInterval(QSS(Filtration.eternity()))
-            .setGranularity(Granularity.ALL)
+            .setGranularity(Granularities.ALL)
             .setDimFilter(IN("dim2", ImmutableList.of("", "a"), null))
             .setDimensions(DIMS(new DefaultDimensionSpec("dim1", "d0")))
             .setAggregatorSpecs(AGGS(new LongSumAggregatorFactory("a0", "cnt")))
@@ -2090,7 +2090,7 @@ public class CalciteQueryTest
         GroupByQuery.builder()
             .setDataSource(CalciteTests.DATASOURCE1)
             .setInterval(QSS(Filtration.eternity()))
-            .setGranularity(Granularity.ALL)
+            .setGranularity(Granularities.ALL)
             .setDimensions(DIMS(new DefaultDimensionSpec("dim1", "d0")))
             .setAggregatorSpecs(AGGS(new LongSumAggregatorFactory("a0", "cnt")))
             .setLimitSpec(
@@ -2135,7 +2135,7 @@ public class CalciteQueryTest
         GroupByQuery.builder()
             .setDataSource(CalciteTests.DATASOURCE1)
             .setInterval(QSS(Filtration.eternity()))
-            .setGranularity(Granularity.ALL)
+            .setGranularity(Granularities.ALL)
             .setDimFilter(NOT(SELECTOR("dim1", "", null)))
             .setDimensions(DIMS(new ExtractionDimensionSpec(
                 "dim1",
@@ -2150,7 +2150,7 @@ public class CalciteQueryTest
         GroupByQuery.builder()
             .setDataSource(CalciteTests.DATASOURCE1)
             .setInterval(QSS(Filtration.eternity()))
-            .setGranularity(Granularity.ALL)
+            .setGranularity(Granularities.ALL)
             .setDimFilter(IN(
                 "dim2",
                 ImmutableList.of("1", "2", "a", "d"),
@@ -2162,7 +2162,7 @@ public class CalciteQueryTest
                 )
         )
         .setInterval(QSS(Filtration.eternity()))
-        .setGranularity(Granularity.ALL)
+        .setGranularity(Granularities.ALL)
         .setAggregatorSpecs(AGGS(
             new CountAggregatorFactory("a0")
         ))
@@ -2193,7 +2193,7 @@ public class CalciteQueryTest
             .setDataSource(CalciteTests.DATASOURCE1)
             .setInterval(QSS(Filtration.eternity()))
             .setDimFilter(NOT(SELECTOR("dim2", "", null)))
-            .setGranularity(Granularity.ALL)
+            .setGranularity(Granularities.ALL)
             .setDimensions(DIMS(new DefaultDimensionSpec("dim2", "d0")))
             .setAggregatorSpecs(AGGS(new LongSumAggregatorFactory("a0", "cnt")))
             .setContext(QUERY_CONTEXT_DEFAULT)
@@ -2201,7 +2201,7 @@ public class CalciteQueryTest
                 )
         )
         .setInterval(QSS(Filtration.eternity()))
-        .setGranularity(Granularity.ALL)
+        .setGranularity(Granularities.ALL)
         .setAggregatorSpecs(AGGS(
             new LongSumAggregatorFactory("a0", "a0"),
             new CountAggregatorFactory("a1")
@@ -2231,7 +2231,7 @@ public class CalciteQueryTest
         GroupByQuery.builder()
             .setDataSource(CalciteTests.DATASOURCE1)
             .setInterval(QSS(Filtration.eternity()))
-            .setGranularity(Granularity.ALL)
+            .setGranularity(Granularities.ALL)
             .setDimensions(DIMS(new DefaultDimensionSpec("dim2", "d0")))
             .setAggregatorSpecs(AGGS(new LongSumAggregatorFactory("a0", "cnt")))
             .setLimit(1)
@@ -2241,7 +2241,7 @@ public class CalciteQueryTest
         )
         .setDimFilter(BOUND("a0", "0", null, true, false, null, StringComparators.NUMERIC))
         .setInterval(QSS(Filtration.eternity()))
-        .setGranularity(Granularity.ALL)
+        .setGranularity(Granularities.ALL)
         .setAggregatorSpecs(AGGS(
             new LongSumAggregatorFactory("a0", "a0"),
             new CountAggregatorFactory("a1")
@@ -2271,7 +2271,7 @@ public class CalciteQueryTest
         GroupByQuery.builder()
             .setDataSource(CalciteTests.DATASOURCE1)
             .setInterval(QSS(Filtration.eternity()))
-            .setGranularity(Granularity.ALL)
+            .setGranularity(Granularities.ALL)
             .setDimFilter(NOT(SELECTOR("dim1", "", null)))
             .setDimensions(DIMS(new DefaultDimensionSpec("dim1", "d0")))
             .setContext(QUERY_CONTEXT_DEFAULT)
@@ -2279,7 +2279,7 @@ public class CalciteQueryTest
                 )
         )
        .setInterval(QSS(Filtration.eternity()))
-        .setGranularity(Granularity.ALL)
+        .setGranularity(Granularities.ALL)
         .setAggregatorSpecs(AGGS(
             new CountAggregatorFactory("a0"),
             new CardinalityAggregatorFactory(
@@ -2348,7 +2348,7 @@ public class CalciteQueryTest
         GroupByQuery.builder()
             .setDataSource(CalciteTests.DATASOURCE1)
             .setInterval(QSS(Filtration.eternity()))
-            .setGranularity(Granularity.ALL)
+            .setGranularity(Granularities.ALL)
             .setDimensions(DIMS(new DefaultDimensionSpec("dim2", "d0")))
             .setAggregatorSpecs(AGGS(new LongSumAggregatorFactory("a0", "cnt")))
             .setPostAggregatorSpecs(ImmutableList.of(
@@ -2359,7 +2359,7 @@ public class CalciteQueryTest
                 )
         )
         .setInterval(QSS(Filtration.eternity()))
-        .setGranularity(Granularity.ALL)
+        .setGranularity(Granularities.ALL)
         .setDimensions(DIMS(new DefaultDimensionSpec("a1", "d0")))
         .setAggregatorSpecs(AGGS(
             new CountAggregatorFactory("a0")
@@ -2391,7 +2391,7 @@ public class CalciteQueryTest
         GroupByQuery.builder()
             .setDataSource(CalciteTests.DATASOURCE1)
             .setInterval(QSS(Filtration.eternity()))
-            .setGranularity(Granularity.ALL)
+            .setGranularity(Granularities.ALL)
             .setDimensions(DIMS(new DefaultDimensionSpec("dim2", "d0")))
             .setAggregatorSpecs(AGGS(new LongSumAggregatorFactory("a0", "cnt")))
             .setContext(QUERY_CONTEXT_DEFAULT)
@@ -2399,7 +2399,7 @@ public class CalciteQueryTest
                 )
         )
         .setInterval(QSS(Filtration.eternity()))
-        .setGranularity(Granularity.ALL)
+        .setGranularity(Granularities.ALL)
         .setDimensions(DIMS(new DefaultDimensionSpec("a0", "d0")))
         .setAggregatorSpecs(AGGS(
             new CountAggregatorFactory("a0")
@@ -2440,7 +2440,7 @@ public class CalciteQueryTest
         Druids.newTimeseriesQueryBuilder()
             .dataSource(CalciteTests.DATASOURCE1)
             .intervals(QSS(Filtration.eternity()))
-            .granularity(Granularity.ALL)
+            .granularity(Granularities.ALL)
             .aggregators(
                 AGGS(
                     new LongSumAggregatorFactory("a0", "cnt"),
@@ -2492,7 +2492,7 @@ public class CalciteQueryTest
             .dataSource(CalciteTests.DATASOURCE1)
             .intervals(QSS(Filtration.eternity()))
             .filters(NOT(SELECTOR("dim1", "", null)))
-            .granularity(Granularity.ALL)
+            .granularity(Granularities.ALL)
             .aggregators(
                 AGGS(
                     new CardinalityAggregatorFactory(
@@ -2526,7 +2526,7 @@ public class CalciteQueryTest
         GroupByQuery.builder()
             .setDataSource(CalciteTests.DATASOURCE1)
             .setInterval(QSS(Filtration.eternity()))
-            .setGranularity(Granularity.ALL)
+            .setGranularity(Granularities.ALL)
             .setDimensions(
                 DIMS(
                     new DefaultDimensionSpec("dim2", "d1"),
@@ -2572,7 +2572,7 @@ public class CalciteQueryTest
         GroupByQuery.builder()
             .setDataSource(CalciteTests.DATASOURCE1)
             .setInterval(QSS(Filtration.eternity()))
-            .setGranularity(Granularity.ALL)
+            .setGranularity(Granularities.ALL)
             .setDimensions(
                 DIMS(
                     new DefaultDimensionSpec("dim2", "d1"),
@@ -2617,7 +2617,7 @@ public class CalciteQueryTest
         Druids.newTimeseriesQueryBuilder()
             .dataSource(CalciteTests.DATASOURCE1)
             .intervals(QSS(new Interval("2000/P2M")))
-            .granularity(Granularity.ALL)
+            .granularity(Granularities.ALL)
             .aggregators(AGGS(new CountAggregatorFactory("a0")))
             .context(TIMESERIES_CONTEXT_DEFAULT)
             .build()
@@ -2638,7 +2638,7 @@ public class CalciteQueryTest
         Druids.newTimeseriesQueryBuilder()
             .dataSource(CalciteTests.DATASOURCE1)
             .intervals(QSS(new Interval("2000-01-02/2002")))
-            .granularity(Granularity.ALL)
+            .granularity(Granularities.ALL)
             .aggregators(AGGS(new CountAggregatorFactory("a0")))
             .context(TIMESERIES_CONTEXT_DEFAULT)
             .build()
@@ -2660,7 +2660,7 @@ public class CalciteQueryTest
         Druids.newTimeseriesQueryBuilder()
             .dataSource(CalciteTests.DATASOURCE1)
             .intervals(QSS(new Interval("2000-01-02T08Z/2002-01-01T08Z")))
-            .granularity(Granularity.ALL)
+            .granularity(Granularities.ALL)
             .aggregators(AGGS(new CountAggregatorFactory("a0")))
             .context(TIMESERIES_CONTEXT_LOS_ANGELES)
             .build()
@@ -2685,7 +2685,7 @@ public class CalciteQueryTest
                 new Interval(Filtration.eternity().getStart(), new DateTime("2001-01-01")),
                 new Interval(new DateTime("2001-02-01"), Filtration.eternity().getEnd())
             ))
-            .granularity(Granularity.ALL)
+            .granularity(Granularities.ALL)
             .aggregators(AGGS(new CountAggregatorFactory("a0")))
             .context(TIMESERIES_CONTEXT_DEFAULT)
             .build()
@@ -2707,7 +2707,7 @@ public class CalciteQueryTest
         Druids.newTimeseriesQueryBuilder()
             .dataSource(CalciteTests.DATASOURCE1)
             .intervals(QSS(new Interval(Filtration.eternity().getStart(), new DateTime("2000-02-01"))))
-            .granularity(Granularity.ALL)
+            .granularity(Granularities.ALL)
             .aggregators(AGGS(new CountAggregatorFactory("a0")))
             .context(TIMESERIES_CONTEXT_DEFAULT)
             .build()
@@ -2729,7 +2729,7 @@ public class CalciteQueryTest
         Druids.newTimeseriesQueryBuilder()
             .dataSource(CalciteTests.DATASOURCE1)
             .intervals(QSS(new Interval(Filtration.eternity().getStart(), new DateTime("2000-03-01"))))
-            .granularity(Granularity.ALL)
+            .granularity(Granularities.ALL)
             .aggregators(AGGS(new CountAggregatorFactory("a0")))
             .context(TIMESERIES_CONTEXT_DEFAULT)
             .build()
@@ -2752,7 +2752,7 @@ public class CalciteQueryTest
         Druids.newTimeseriesQueryBuilder()
             .dataSource(CalciteTests.DATASOURCE1)
             .intervals(QSS(new Interval("2000/P1M")))
-            .granularity(Granularity.ALL)
+            .granularity(Granularities.ALL)
             .aggregators(AGGS(new CountAggregatorFactory("a0")))
             .context(TIMESERIES_CONTEXT_DEFAULT)
             .build()
@@ -2775,7 +2775,7 @@ public class CalciteQueryTest
         Druids.newTimeseriesQueryBuilder()
             .dataSource(CalciteTests.DATASOURCE1)
             .intervals(QSS(new Interval("2000-02-01/P2M"), new Interval("2000-05-01/P1M")))
-            .granularity(Granularity.ALL)
+            .granularity(Granularities.ALL)
             .aggregators(AGGS(new CountAggregatorFactory("a0")))
             .context(TIMESERIES_CONTEXT_DEFAULT)
             .build()
@@ -2796,7 +2796,7 @@ public class CalciteQueryTest
         Druids.newTimeseriesQueryBuilder()
             .dataSource(CalciteTests.DATASOURCE1)
             .intervals(QSS())
-            .granularity(Granularity.ALL)
+            .granularity(Granularities.ALL)
             .aggregators(AGGS(new CountAggregatorFactory("a0")))
             .context(TIMESERIES_CONTEXT_DEFAULT)
             .build()
@@ -2815,7 +2815,7 @@ public class CalciteQueryTest
         GroupByQuery.builder()
             .setDataSource(CalciteTests.DATASOURCE1)
             .setInterval(QSS(Filtration.eternity()))
-            .setGranularity(Granularity.ALL)
+            .setGranularity(Granularities.ALL)
             .setDimensions(DIMS(
                 new ExtractionDimensionSpec("dim1", "d0", ValueType.FLOAT, new BucketExtractionFn(1.0, 0.0))
             ))
@@ -2841,7 +2841,7 @@ public class CalciteQueryTest
         GroupByQuery.builder()
             .setDataSource(CalciteTests.DATASOURCE1)
             .setInterval(QSS(Filtration.eternity()))
-            .setGranularity(Granularity.ALL)
+            .setGranularity(Granularities.ALL)
             .setDimensions(
                 DIMS(
                     new ExtractionDimensionSpec("dim1", "d0", ValueType.FLOAT, new BucketExtractionFn(1.0, 0.0))
@@ -2884,14 +2884,14 @@ public class CalciteQueryTest
         GroupByQuery.builder()
             .setDataSource(CalciteTests.DATASOURCE1)
             .setInterval(QSS(Filtration.eternity()))
-            .setGranularity(Granularity.ALL)
+            .setGranularity(Granularities.ALL)
             .setDimensions(
                 DIMS(
                     new ExtractionDimensionSpec(
                         "__time",
                         "d0",
                         ValueType.LONG,
-                        new TimeFormatExtractionFn(null, null, null, Granularity.YEAR, true)
+                        new TimeFormatExtractionFn(null, null, null, Granularities.YEAR, true)
                     ),
                     new DefaultDimensionSpec("dim2", "d1")
                 )
@@ -2945,7 +2945,7 @@ public class CalciteQueryTest
         GroupByQuery.builder()
             .setDataSource(CalciteTests.DATASOURCE1)
             .setInterval(QSS(Filtration.eternity()))
-            .setGranularity(Granularity.ALL)
+            .setGranularity(Granularities.ALL)
             .setDimensions(
                 DIMS(
                     new ExtractionDimensionSpec(
@@ -2987,7 +2987,7 @@ public class CalciteQueryTest
         Druids.newTimeseriesQueryBuilder()
             .dataSource(CalciteTests.DATASOURCE1)
             .intervals(QSS(Filtration.eternity()))
-            .granularity(Granularity.MONTH)
+            .granularity(Granularities.MONTH)
             .aggregators(AGGS(new LongSumAggregatorFactory("a0", "cnt")))
             .context(TIMESERIES_CONTEXT_DEFAULT)
             .build()
@@ -3123,7 +3123,7 @@ public class CalciteQueryTest
         Druids.newTimeseriesQueryBuilder()
             .dataSource(CalciteTests.DATASOURCE1)
             .intervals(QSS(Filtration.eternity()))
-            .granularity(Granularity.MONTH)
+            .granularity(Granularities.MONTH)
             .aggregators(AGGS(new LongSumAggregatorFactory("a0", "cnt")))
             .descending(true)
             .context(TIMESERIES_CONTEXT_DEFAULT)
@@ -3150,7 +3150,7 @@ public class CalciteQueryTest
         GroupByQuery.builder()
             .setDataSource(CalciteTests.DATASOURCE1)
             .setInterval(QSS(Filtration.eternity()))
-            .setGranularity(Granularity.ALL)
+            .setGranularity(Granularities.ALL)
             .setDimensions(
                 DIMS(
                     new ExtractionDimensionSpec(
@@ -3161,7 +3161,7 @@ public class CalciteQueryTest
                         "Y",
                         DateTimeZone.UTC,
                         null,
-                        Granularity.NONE,
+                        Granularities.NONE,
                         true
                     )
                 )
@@ -3202,7 +3202,7 @@ public class CalciteQueryTest
         GroupByQuery.builder()
             .setDataSource(CalciteTests.DATASOURCE1)
             .setInterval(QSS(Filtration.eternity()))
-            .setGranularity(Granularity.ALL)
+            .setGranularity(Granularities.ALL)
             .setDimensions(
                 DIMS(
                     new ExtractionDimensionSpec(
@@ -3213,7 +3213,7 @@ public class CalciteQueryTest
                         "Y",
                         DateTimeZone.UTC,
                         null,
-                        Granularity.YEAR,
+                        Granularities.YEAR,
                         true
                     )
                 )
@@ -3243,7 +3243,7 @@ public class CalciteQueryTest
         GroupByQuery.builder()
             .setDataSource(CalciteTests.DATASOURCE1)
             .setInterval(QSS(Filtration.eternity()))
-            .setGranularity(Granularity.ALL)
+            .setGranularity(Granularities.ALL)
             .setDimensions(
                 DIMS(
                     new ExtractionDimensionSpec(
@@ -3289,14 +3289,14 @@ public class CalciteQueryTest
         GroupByQuery.builder()
             .setDataSource(CalciteTests.DATASOURCE1)
             .setInterval(QSS(Filtration.eternity()))
-            .setGranularity(Granularity.ALL)
+            .setGranularity(Granularities.ALL)
             .setDimensions(
                 DIMS(
                     new ExtractionDimensionSpec(
                         "__time",
                        "d0",
                         ValueType.LONG,
-                        new TimeFormatExtractionFn(null, null, null, Granularity.MONTH, true)
+                        new TimeFormatExtractionFn(null, null, null, Granularities.MONTH, true)
                     )
                 )
             )
@@ -3338,13 +3338,13 @@ public class CalciteQueryTest
         new TopNQueryBuilder()
             .dataSource(CalciteTests.DATASOURCE1)
             .intervals(QSS(Filtration.eternity()))
-            .granularity(Granularity.ALL)
+            .granularity(Granularities.ALL)
             .dimension(
                 new ExtractionDimensionSpec(
                     "__time",
                     "d0",
                     ValueType.LONG,
-                    new TimeFormatExtractionFn(null, null, null, Granularity.MONTH, true)
+                    new TimeFormatExtractionFn(null, null, null, Granularities.MONTH, true)
                 )
             )
             .aggregators(AGGS(new LongSumAggregatorFactory("a0", "cnt")))
@@ -3371,7 +3371,7 @@ public class CalciteQueryTest
         GroupByQuery.builder()
             .setDataSource(CalciteTests.DATASOURCE1)
             .setInterval(QSS(Filtration.eternity()))
-            .setGranularity(Granularity.ALL)
+            .setGranularity(Granularities.ALL)
             .setDimensions(
                 DIMS(
                     new DefaultDimensionSpec("dim2", "d1"),
@@ -3379,7 +3379,7 @@ public class CalciteQueryTest
                         "__time",
                         "d0",
                         ValueType.LONG,
-                        new TimeFormatExtractionFn(null, null, null, Granularity.MONTH, true)
+                        new TimeFormatExtractionFn(null, null, null, Granularities.MONTH, true)
                     )
                 )
            )
@@ -3423,7 +3423,7 @@ public class CalciteQueryTest
         GroupByQuery.builder()
             .setDataSource(CalciteTests.DATASOURCE1)
             .setInterval(QSS(Filtration.eternity()))
-            .setGranularity(Granularity.ALL)
+            .setGranularity(Granularities.ALL)
             .setDimFilter(NOT(SELECTOR("dim1", "", null)))
             .setDimensions(DIMS(new DefaultDimensionSpec("dim1", "d0")))
             .setContext(QUERY_CONTEXT_DEFAULT)
@@ -3431,7 +3431,7 @@ public class CalciteQueryTest
         GroupByQuery.builder()
             .setDataSource(CalciteTests.DATASOURCE1)
             .setInterval(QSS(Filtration.eternity()))
-            .setGranularity(Granularity.ALL)
+            .setGranularity(Granularities.ALL)
             .setDimFilter(
                 AND(
                     IN("dim2", ImmutableList.of("1", "10.1", "2", "abc", "def"), null),
@@ -3490,7 +3490,7 @@ public class CalciteQueryTest
         GroupByQuery.builder()
             .setDataSource(CalciteTests.DATASOURCE1)
             .setInterval(QSS(Filtration.eternity()))
-            .setGranularity(Granularity.ALL)
+            .setGranularity(Granularities.ALL)
             .setDimFilter(SELECTOR("dim2", "abc", null))
             .setDimensions(DIMS(
                 new DefaultDimensionSpec("dim1", "d0"),
@@ -3508,7 +3508,7 @@ public class CalciteQueryTest
             ))
             .metrics(ImmutableList.of("cnt"))
             .intervals(QSS(Filtration.eternity()))
-            .granularity(Granularity.ALL)
+            .granularity(Granularities.ALL)
             .filters(AND(SELECTOR("dim1", "def", null), SELECTOR("dim2", "abc", null)))
             .pagingSpec(FIRST_PAGING_SPEC)
             .context(QUERY_CONTEXT_DEFAULT)
@@ -3521,7 +3521,7 @@ public class CalciteQueryTest
             ))
            .metrics(ImmutableList.of("cnt"))
             .intervals(QSS(Filtration.eternity()))
-            .granularity(Granularity.ALL)
+            .granularity(Granularities.ALL)
             .filters(AND(SELECTOR("dim1", "def", null), SELECTOR("dim2", "abc", null)))
             .pagingSpec(
                 new PagingSpec(
@@ -3550,7 +3550,7 @@ public class CalciteQueryTest
         GroupByQuery.builder()
             .setDataSource(CalciteTests.DATASOURCE1)
             .setInterval(QSS(Filtration.eternity()))
-            .setGranularity(Granularity.ALL)
+            .setGranularity(Granularities.ALL)
             .setDimFilter(NOT(SELECTOR("dim1", "", null)))
             .setDimensions(
                 DIMS(new ExtractionDimensionSpec("dim1", "d0", new SubstringDimExtractionFn(0, 1)))
@@ -3560,7 +3560,7 @@ public class CalciteQueryTest
         GroupByQuery.builder()
             .setDataSource(CalciteTests.DATASOURCE1)
             .setInterval(QSS(Filtration.eternity()))
-            .setGranularity(Granularity.ALL)
+            .setGranularity(Granularities.ALL)
             .setDimFilter(
                 IN(
                     "dim2",
@@ -3597,7 +3597,7 @@ public class CalciteQueryTest
         GroupByQuery.builder()
             .setDataSource(CalciteTests.DATASOURCE2)
             .setInterval(QSS(Filtration.eternity()))
-            .setGranularity(Granularity.ALL)
+            .setGranularity(Granularities.ALL)
             .setDimFilter(OR(
                 new LikeDimFilter("dim1", "דר%", null, null),
                 new SelectorDimFilter("dim1", "друид", null)