From cbd1902db8efa812f945f8b621fabee0f92672a5 Mon Sep 17 00:00:00 2001 From: Roman Leventov Date: Mon, 21 Aug 2017 15:02:42 -0500 Subject: [PATCH] Add forbidden-apis plugin; prohibit using system time zone (#4611) * Forbidden APIs WIP * Remove some tests * Restore io.druid.math.expr.Function * Integration tests fix * Add comments * Fix in SimpleWorkerProvisioningStrategy * Formatting * Replace String.format() with StringUtils.format() in RemoteTaskRunnerTest * Address comments * Fix GroupByMultiSegmentTest --- .../io/druid/data/input/MapBasedInputRow.java | 3 +- .../java/io/druid/data/input/MapBasedRow.java | 3 +- .../io/druid/timeline/DataSegmentUtils.java | 2 +- .../test/java/io/druid/TestObjectMapper.java | 3 +- .../io/druid/data/input/MapBasedRowTest.java | 7 +- .../input/impl/InputRowParserSerdeTest.java | 8 +- ...fetchableTextFilesFirehoseFactoryTest.java | 4 +- .../data/input/impl/TimestampSpecTest.java | 7 +- .../io/druid/timeline/DataSegmentTest.java | 23 +- .../druid/timeline/DataSegmentUtilsTest.java | 26 +- .../druid/benchmark/ExpressionBenchmark.java | 4 +- .../druid/benchmark/TimeParseBenchmark.java | 3 +- .../benchmark/datagen/BenchmarkSchemas.java | 9 +- .../druid/benchmark/query/SqlBenchmark.java | 5 +- .../benchmark/query/TimeseriesBenchmark.java | 4 +- .../CostBalancerStrategyBenchmark.java | 3 +- .../benchmark/BenchmarkDataGeneratorTest.java | 6 +- codestyle/joda-time-forbidden-apis.txt | 46 + .../main/java/io/druid/audit/AuditEntry.java | 5 +- .../java/io/druid/math/expr/Function.java | 6 +- .../timeline/VersionedIntervalTimeline.java | 4 +- .../io/druid/common/utils/JodaUtilsTest.java | 93 +- .../java/io/druid/concurrent/ExecsTest.java | 2 +- .../druid/concurrent/LifecycleLockTest.java | 4 +- .../VersionedIntervalTimelineTest.java | 392 ++++---- ...asedDruidToTimelineEventConverterTest.java | 3 +- .../azure/AzureDataSegmentKillerTest.java | 4 +- .../azure/AzureDataSegmentPullerTest.java | 4 +- .../azure/AzureDataSegmentPusherTest.java | 6 +- .../CloudFilesDataSegmentPusherTest.java | 4 +- .../DistinctCountTimeseriesQueryTest.java | 3 +- .../DistinctCountTopNQueryTest.java | 4 +- .../google/GoogleDataSegmentKillerTest.java | 4 +- .../google/GoogleDataSegmentPullerTest.java | 4 +- .../google/GoogleDataSegmentPusherTest.java | 6 +- .../graphite/WhiteListBasedConverterTest.java | 3 +- .../input/orc/DruidOrcInputFormatTest.java | 6 +- .../orc/OrcHadoopInputRowParserTest.java | 4 +- .../input/orc/OrcIndexGeneratorJobTest.java | 5 +- .../parquet/ParquetHadoopInputRowParser.java | 3 +- .../rabbitmq/RabbitMQProducerMain.java | 3 +- .../query/scan/ScanQueryRunnerFactory.java | 2 +- .../query/scan/MultiSegmentScanQueryTest.java | 4 +- .../druid/query/scan/ScanQueryRunnerTest.java | 8 +- .../druid/query/scan/ScanQuerySpecTest.java | 4 +- .../src/test/java/DimensionConverterTest.java | 4 +- .../src/test/java/StatsDEmitterTest.java | 11 +- .../TimestampAggregatorFactory.java | 5 +- .../druid/segment/MapVirtualColumnTest.java | 4 +- .../input/AvroStreamInputRowParserTest.java | 3 +- .../SketchAggregationWithSimpleDataTest.java | 7 +- .../loading/HdfsDataSegmentFinderTest.java | 122 ++- .../hdfs/HdfsDataSegmentKillerTest.java | 4 +- .../hdfs/HdfsDataSegmentPusherTest.java | 20 +- .../histogram/sql/QuantileSqlAggregator.java | 2 +- .../ApproximateHistogramTopNQueryTest.java | 4 +- .../druid/indexing/kafka/KafkaIndexTask.java | 3 +- .../kafka/supervisor/KafkaSupervisor.java | 23 +- .../indexing/kafka/KafkaIOConfigTest.java | 6 +- .../kafka/KafkaIndexTaskClientTest.java | 5 +- 
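The subject and change list above describe the enforcement this patch adds: Joda-Time calls that silently pick up the JVM default time zone become forbidden APIs, and call sites move to UTC-explicit helpers. Below is a minimal sketch of that substitution, using the DateTimes and Intervals helpers this patch introduces; the wrapper class and literal values are hypothetical, not code from the patch.

import io.druid.java.util.common.DateTimes;
import io.druid.java.util.common.Intervals;
import org.joda.time.DateTime;
import org.joda.time.Interval;

class DefaultTimeZoneExample
{
  static void example()
  {
    // Before: both results depend on the host's default time zone, so the same code yields
    // different instants on differently configured JVMs. Both calls are banned by the
    // codestyle/joda-time-forbidden-apis.txt file added in this patch.
    DateTime now = new DateTime();
    Interval day = new Interval(0L, 86_400_000L);

    // After: the replacements applied throughout the files listed in this summary.
    DateTime nowUtc = DateTimes.nowUtc();
    Interval dayUtc = Intervals.utc(0L, 86_400_000L);
  }
}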
.../indexing/kafka/KafkaIndexTaskTest.java | 11 +- .../kafka/supervisor/KafkaSupervisorTest.java | 39 +- .../lookup/namespace/JdbcCacheGenerator.java | 2 +- .../protobuf/ProtobufInputRowParserTest.java | 3 +- .../storage/s3/S3DataSegmentArchiverTest.java | 4 +- .../storage/s3/S3DataSegmentFinderTest.java | 91 +- .../storage/s3/S3DataSegmentMoverTest.java | 9 +- .../storage/s3/S3DataSegmentPusherTest.java | 4 +- .../variance/VarianceTestHelper.java | 3 +- .../variance/VarianceTimeseriesQueryTest.java | 6 +- .../variance/VarianceTopNQueryTest.java | 4 +- .../druid/hll/HyperLogLogCollectorTest.java | 38 +- .../main/java/io/druid/indexer/Bucket.java | 5 +- .../indexer/DetermineHashedPartitionsJob.java | 7 +- .../druid/indexer/DeterminePartitionsJob.java | 8 +- .../indexer/HadoopDruidIndexerConfig.java | 7 +- .../indexer/HadoopDruidIndexerMapper.java | 4 +- .../io/druid/indexer/HadoopTuningConfig.java | 6 +- .../main/java/io/druid/indexer/JobHelper.java | 8 +- .../hadoop/DatasourceIngestionSpec.java | 2 +- .../path/GranularUnprocessedPathSpec.java | 7 +- .../indexer/path/GranularityPathSpec.java | 2 +- .../indexer/BatchDeltaIngestionTest.java | 5 +- .../java/io/druid/indexer/BucketTest.java | 10 +- .../DetermineHashedPartitionsJobTest.java | 4 +- .../indexer/DeterminePartitionsJobTest.java | 4 +- .../indexer/HadoopDruidIndexerConfigTest.java | 22 +- .../indexer/HadoopIngestionSpecTest.java | 4 +- ...cUpdateDatasourcePathSpecSegmentsTest.java | 11 +- .../indexer/IndexGeneratorCombinerTest.java | 12 +- .../druid/indexer/IndexGeneratorJobTest.java | 3 +- .../java/io/druid/indexer/JobHelperTest.java | 5 +- .../hadoop/DatasourceIngestionSpecTest.java | 9 +- .../hadoop/DatasourceInputFormatTest.java | 10 +- .../hadoop/DatasourceInputSplitTest.java | 5 +- .../hadoop/WindowedDataSegmentTest.java | 5 +- .../indexer/path/DatasourcePathSpecTest.java | 10 +- .../indexer/path/GranularityPathSpecTest.java | 6 +- .../updater/HadoopConverterJobTest.java | 3 +- .../HadoopDruidConverterConfigTest.java | 4 +- .../ActionBasedUsedSegmentChecker.java | 2 +- .../common/actions/SegmentListUsedAction.java | 2 +- .../indexing/common/task/AbstractTask.java | 4 +- .../indexing/common/task/AppendTask.java | 3 +- .../common/task/ConvertSegmentTask.java | 5 +- .../indexing/common/task/HadoopIndexTask.java | 6 +- .../druid/indexing/common/task/IndexTask.java | 9 +- .../indexing/common/task/MergeTaskBase.java | 3 +- .../druid/indexing/common/task/NoopTask.java | 4 +- .../common/task/RealtimeIndexTask.java | 3 +- .../common/task/SameIntervalMergeTask.java | 4 +- .../indexing/overlord/ForkingTaskRunner.java | 3 +- .../overlord/HeapMemoryTaskStorage.java | 3 +- .../overlord/MetadataTaskStorage.java | 5 +- .../indexing/overlord/RemoteTaskRunner.java | 6 +- .../overlord/RemoteTaskRunnerWorkQueue.java | 4 +- .../druid/indexing/overlord/TaskLockbox.java | 17 +- .../indexing/overlord/TaskRunnerWorkItem.java | 13 +- .../overlord/ThreadPoolTaskRunner.java | 4 +- .../io/druid/indexing/overlord/ZkWorker.java | 4 +- .../AbstractWorkerProvisioningStrategy.java | 4 +- ...ngTaskBasedWorkerProvisioningStrategy.java | 17 +- .../ProvisioningSchedulerConfig.java | 3 +- .../overlord/autoscaling/ScalingStats.java | 17 +- .../SimpleWorkerProvisioningStrategy.java | 13 +- .../overlord/http/OverlordResource.java | 12 +- .../worker/WorkerCuratorCoordinator.java | 7 +- .../worker/executor/ExecutorLifecycle.java | 5 +- .../ActionBasedUsedSegmentCheckerTest.java | 30 +- .../indexing/common/TaskToolboxTest.java | 4 +- 
.../actions/RemoteTaskActionClientTest.java | 4 +- .../actions/SegmentAllocateActionTest.java | 5 +- .../actions/SegmentInsertActionTest.java | 7 +- .../actions/SegmentListUsedActionTest.java | 7 +- .../SegmentTransactionalInsertActionTest.java | 9 +- .../common/task/ConvertSegmentTaskTest.java | 11 +- .../task/HadoopConverterTaskSerDeTest.java | 3 +- .../indexing/common/task/IndexTaskTest.java | 47 +- .../common/task/MergeTaskBaseTest.java | 10 +- .../common/task/RealtimeIndexTaskTest.java | 5 +- .../task/SameIntervalMergeTaskTest.java | 12 +- .../indexing/common/task/TaskSerdeTest.java | 56 +- .../IngestSegmentFirehoseFactoryTest.java | 10 +- ...estSegmentFirehoseFactoryTimelineTest.java | 13 +- .../overlord/ImmutableWorkerInfoTest.java | 36 +- .../indexing/overlord/RealtimeishTask.java | 5 +- .../overlord/RemoteTaskRunnerTest.java | 13 +- .../indexing/overlord/TaskLifecycleTest.java | 40 +- .../indexing/overlord/TaskLockboxTest.java | 34 +- ...dingTaskBasedProvisioningStrategyTest.java | 3 +- .../SimpleProvisioningStrategyTest.java | 15 +- ...nWithAffinityWorkerSelectStrategyTest.java | 22 +- ...lDistributionWorkerSelectStrategyTest.java | 18 +- ...yWithAffinityWorkerSelectStrategyTest.java | 12 +- .../supervisor/SupervisorManagerTest.java | 4 +- .../supervisor/SupervisorResourceTest.java | 6 +- integration-tests/pom.xml | 20 + .../src/main/java/org/testng/TestNG.java | 50 +- .../java/org/testng/remote/RemoteTestNG.java | 9 +- .../tests/indexer/AbstractIndexerTest.java | 5 +- .../indexer/ITRealtimeIndexTaskTest.java | 9 +- .../druid/tests/indexer/ITUnionQueryTest.java | 3 +- .../io/druid/java/util/common/DateTimes.java | 59 ++ .../io/druid/java/util/common/Intervals.java | 26 +- .../io/druid/java/util/common}/JodaUtils.java | 3 +- .../common/granularity/AllGranularity.java | 14 +- .../granularity/DurationGranularity.java | 9 +- .../util/common/granularity/Granularity.java | 26 +- .../common/granularity/GranularityType.java | 4 +- .../common/granularity/NoneGranularity.java | 10 +- .../common/granularity/PeriodGranularity.java | 5 +- .../util/common/parsers/TimestampParser.java | 13 +- .../java/util/common/GranularityTest.java | 227 ++--- .../util/common/guava/ComparatorsTest.java | 77 +- .../common/parsers/TimestampParserTest.java | 94 +- pom.xml | 50 +- .../main/java/io/druid/jackson/JodaStuff.java | 8 +- .../src/main/java/io/druid/query/Druids.java | 3 +- .../query/IntervalChunkingQueryRunner.java | 15 +- .../java/io/druid/query/TimewarpOperator.java | 10 +- .../DataSourceMetadataQuery.java | 18 +- .../io/druid/query/expression/ExprUtils.java | 17 +- .../expression/TimestampCeilExprMacro.java | 6 +- .../expression/TimestampExtractExprMacro.java | 5 +- .../expression/TimestampFloorExprMacro.java | 6 +- .../extraction/TimeFormatExtractionFn.java | 8 +- .../druid/query/filter/IntervalDimFilter.java | 2 +- .../query/groupby/GroupByQueryHelper.java | 8 +- .../epinephelinae/GroupByQueryEngineV2.java | 3 +- .../groupby/strategy/GroupByStrategyV2.java | 7 +- .../druid/query/metadata/SegmentAnalyzer.java | 5 +- .../SegmentMetadataQueryQueryToolChest.java | 2 +- .../metadata/SegmentMetadataQuery.java | 17 +- .../search/SearchQueryQueryToolChest.java | 6 +- .../io/druid/query/select/EventHolder.java | 7 +- .../druid/query/select/SelectQueryEngine.java | 8 +- .../druid/query/spec/LegacySegmentSpec.java | 19 +- .../spec/MultipleIntervalSegmentSpec.java | 2 +- .../spec/MultipleSpecificSegmentSpec.java | 2 +- .../query/timeboundary/TimeBoundaryQuery.java | 22 +- 
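The entries above list the new java-util classes DateTimes.java and Intervals.java (plus the JodaUtils move). Their bodies are not included in this excerpt; the sketch below is inferred from the call sites in the diffs that follow and covers only the members those call sites use.

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.Interval;
import org.joda.time.chrono.ISOChronology;

// java-util/src/main/java/io/druid/java/util/common/DateTimes.java (sketch)
public final class DateTimes
{
  public static final DateTime EPOCH = utc(0);
  public static final DateTime MAX = utc(JodaUtils.MAX_INSTANT);

  public static DateTime utc(long millis)
  {
    // Explicit UTC chronology instead of the default-zone DateTime(long) constructor.
    return new DateTime(millis, ISOChronology.getInstanceUTC());
  }

  public static DateTime of(String instant)
  {
    return new DateTime(instant, ISOChronology.getInstanceUTC());
  }

  public static DateTime nowUtc()
  {
    return DateTime.now(DateTimeZone.UTC);
  }

  private DateTimes() {}
}

// java-util/src/main/java/io/druid/java/util/common/Intervals.java (sketch, separate file)
public final class Intervals
{
  public static final Interval ETERNITY = utc(JodaUtils.MIN_INSTANT, JodaUtils.MAX_INSTANT);

  public static Interval utc(long startMillis, long endMillis)
  {
    return new Interval(startMillis, endMillis, ISOChronology.getInstanceUTC());
  }

  public static Interval of(String interval)
  {
    return new Interval(interval, ISOChronology.getInstanceUTC());
  }

  private Intervals() {}
}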
.../TimeBoundaryQueryQueryToolChest.java | 4 +- .../TimeBoundaryQueryRunnerFactory.java | 3 +- .../timeboundary/TimeBoundaryResultValue.java | 13 +- .../main/java/io/druid/segment/IndexIO.java | 5 +- .../java/io/druid/segment/IndexMergerV9.java | 15 +- .../segment/QueryableIndexStorageAdapter.java | 16 +- .../main/java/io/druid/segment/Rowboat.java | 4 +- .../BlockLayoutIndexedDoubleSupplier.java | 3 +- .../EntireLayoutIndexedDoubleSupplier.java | 3 +- .../segment/incremental/IncrementalIndex.java | 14 +- .../IncrementalIndexStorageAdapter.java | 5 +- .../collections/CombiningIterableTest.java | 8 +- .../granularity/QueryGranularityTest.java | 328 ++++--- .../jackson/DefaultObjectMapperTest.java | 3 +- .../druid/query/MultiValuedDimensionTest.java | 4 +- .../io/druid/query/QueryContextsTest.java | 8 +- .../io/druid/query/QueryRunnerTestHelper.java | 18 +- ...ResultGranularTimestampComparatorTest.java | 3 +- .../io/druid/query/RetryQueryRunnerTest.java | 63 +- .../io/druid/query/SchemaEvolutionTest.java | 4 +- .../io/druid/query/TimewarpOperatorTest.java | 55 +- .../DataSourceMetadataQueryTest.java | 22 +- .../druid/query/expression/ExprMacroTest.java | 34 +- .../JavaScriptExtractionFnTest.java | 4 +- .../TimeFormatExtractionFnTest.java | 14 +- .../filter/GetDimensionRangeSetTest.java | 10 +- .../query/filter/IntervalDimFilterTest.java | 70 +- .../DefaultGroupByQueryMetricsTest.java | 3 +- .../groupby/GroupByMultiSegmentTest.java | 6 +- .../query/groupby/GroupByQueryConfigTest.java | 6 +- .../query/groupby/GroupByQueryRunnerTest.java | 71 +- .../groupby/GroupByQueryRunnerTestHelper.java | 9 +- .../GroupByTimeseriesQueryRunnerTest.java | 5 +- .../groupby/orderby/DefaultLimitSpecTest.java | 5 +- ...egmentMetadataQueryQueryToolChestTest.java | 6 +- .../metadata/SegmentMetadataQueryTest.java | 54 +- .../SegmentMetadataUnionQueryTest.java | 4 +- .../query/search/SearchBinaryFnTest.java | 3 +- .../search/SearchQueryQueryToolChestTest.java | 19 +- .../query/search/SearchQueryRunnerTest.java | 12 +- .../search/SearchQueryRunnerWithCaseTest.java | 4 +- .../select/MultiSegmentSelectQueryTest.java | 7 +- .../query/select/SelectBinaryFnTest.java | 36 +- .../query/select/SelectQueryRunnerTest.java | 62 +- .../query/select/SelectQuerySpecTest.java | 6 +- .../query/spec/QuerySegmentSpecTest.java | 16 +- .../spec/SpecificSegmentQueryRunnerTest.java | 14 +- .../TimeBoundaryQueryQueryToolChestTest.java | 97 +- .../TimeBoundaryQueryRunnerTest.java | 24 +- .../TimeSeriesUnionQueryRunnerTest.java | 24 +- .../timeseries/TimeseriesBinaryFnTest.java | 3 +- .../TimeseriesQueryQueryToolChestTest.java | 14 +- .../TimeseriesQueryRunnerBonusTest.java | 16 +- .../timeseries/TimeseriesQueryRunnerTest.java | 188 ++-- .../query/topn/TopNBinaryFnBenchmark.java | 3 +- .../io/druid/query/topn/TopNBinaryFnTest.java | 3 +- .../topn/TopNQueryQueryToolChestTest.java | 30 +- .../druid/query/topn/TopNQueryRunnerTest.java | 212 ++--- .../query/topn/TopNQueryRunnerTestHelper.java | 4 +- .../druid/query/topn/TopNUnionQueryTest.java | 4 +- .../java/io/druid/segment/AppendTest.java | 58 +- .../java/io/druid/segment/EmptyIndexTest.java | 6 +- .../java/io/druid/segment/IndexIOTest.java | 3 +- .../io/druid/segment/IndexMergerTestBase.java | 12 +- .../IndexMergerV9CompatibilityTest.java | 2 +- .../IndexMergerV9WithSpatialIndexTest.java | 97 +- .../segment/ReferenceCountingSegmentTest.java | 4 +- .../io/druid/segment/SchemalessIndexTest.java | 13 +- .../druid/segment/SchemalessTestFullTest.java | 172 ++-- 
.../segment/SchemalessTestSimpleTest.java | 18 +- .../segment/StringDimensionHandlerTest.java | 3 +- .../java/io/druid/segment/TestHelper.java | 8 +- .../test/java/io/druid/segment/TestIndex.java | 7 +- .../segment/data/IncrementalIndexTest.java | 5 +- .../druid/segment/filter/AndFilterTest.java | 4 +- .../druid/segment/filter/BaseFilterTest.java | 5 +- .../druid/segment/filter/BoundFilterTest.java | 4 +- .../filter/ColumnComparisonFilterTest.java | 4 +- .../segment/filter/ExpressionFilterTest.java | 4 +- .../segment/filter/FilterPartitionTest.java | 4 +- .../filter/FloatAndDoubleFilteringTest.java | 4 +- .../io/druid/segment/filter/InFilterTest.java | 4 +- .../segment/filter/InvalidFilteringTest.java | 4 +- .../segment/filter/JavaScriptFilterTest.java | 4 +- .../druid/segment/filter/LikeFilterTest.java | 4 +- .../segment/filter/LongFilteringTest.java | 4 +- .../druid/segment/filter/NotFilterTest.java | 4 +- .../druid/segment/filter/RegexFilterTest.java | 4 +- .../segment/filter/SearchQueryFilterTest.java | 4 +- .../segment/filter/SelectorFilterTest.java | 4 +- .../filter/SpatialFilterBonusTest.java | 89 +- .../segment/filter/SpatialFilterTest.java | 97 +- .../segment/filter/TimeFilteringTest.java | 36 +- .../IncrementalIndexStorageAdapterTest.java | 30 +- .../incremental/IncrementalIndexTest.java | 17 +- .../OnheapIncrementalIndexBenchmark.java | 3 +- .../druid/client/CachingClusteredClient.java | 5 +- .../CuratorDruidNodeDiscoveryProvider.java | 3 +- .../SQLMetadataStorageUpdaterJobHandler.java | 5 +- .../IndexerSQLMetadataStorageCoordinator.java | 15 +- .../metadata/SQLMetadataRuleManager.java | 5 +- .../metadata/SQLMetadataSegmentManager.java | 13 +- .../metadata/SQLMetadataSegmentPublisher.java | 5 +- .../SQLMetadataSupervisorManager.java | 6 +- .../granularity/ArbitraryGranularitySpec.java | 4 +- .../appenderator/AppenderatorImpl.java | 4 +- .../appenderator/AppenderatorPlumber.java | 23 +- .../EventReceiverFirehoseFactory.java | 3 +- .../firehose/IngestSegmentFirehose.java | 4 +- .../realtime/firehose/IrcFirehoseFactory.java | 3 +- .../plumber/CustomVersioningPolicy.java | 4 +- .../realtime/plumber/FlushingPlumber.java | 7 +- .../MessageTimeRejectionPolicyFactory.java | 5 +- .../plumber/NoopRejectionPolicyFactory.java | 3 +- .../realtime/plumber/RealtimePlumber.java | 30 +- .../ServerTimeRejectionPolicyFactory.java | 3 +- .../server/AsyncQueryForwardingServlet.java | 8 +- .../io/druid/server/ClientInfoResource.java | 14 +- .../java/io/druid/server/QueryLifecycle.java | 4 +- .../druid/server/audit/SQLAuditManager.java | 3 +- .../BatchDataSegmentAnnouncer.java | 4 +- .../server/coordinator/DruidCoordinator.java | 5 +- .../DruidCoordinatorRuntimeParams.java | 3 +- .../helper/DruidCoordinatorRuleRunner.java | 3 +- .../helper/DruidCoordinatorSegmentKiller.java | 9 +- .../helper/DruidCoordinatorSegmentMerger.java | 3 +- .../io/druid/server/http/ClusterResource.java | 3 +- .../CoordinatorDynamicConfigsResource.java | 3 +- .../server/http/DatasourcesResource.java | 28 +- .../server/http/HostAndPortWithScheme.java | 8 +- .../druid/server/http/IntervalsResource.java | 4 +- .../io/druid/server/http/RulesResource.java | 3 +- .../announcer/ListenerResourceAnnouncer.java | 3 +- .../druid/server/log/FileRequestLogger.java | 8 +- .../router/TieredBrokerHostSelector.java | 3 +- .../io/druid/client/BrokerServerViewTest.java | 19 +- ...chingClusteredClientFunctionalityTest.java | 9 +- .../client/CachingClusteredClientTest.java | 848 +++++++++--------- .../druid/client/CachingQueryRunnerTest.java | 21 
+- .../client/CoordinatorServerViewTest.java | 26 +- .../java/io/druid/client/DataSegmentTest.java | 23 +- .../druid/client/DirectDruidClientTest.java | 18 +- .../client/BatchServerInventoryViewTest.java | 5 +- .../client/ImmutableSegmentLoadInfoTest.java | 6 +- .../indexing/ClientAppendQueryTest.java | 7 +- .../indexing/ClientConversionQueryTest.java | 8 +- .../client/indexing/ClientKillQueryTest.java | 4 +- .../client/indexing/ClientMergeQueryTest.java | 6 +- .../client/selector/ServerSelectorTest.java | 7 +- .../selector/TierSelectorStrategyTest.java | 8 +- ...exerSQLMetadataStorageCoordinatorTest.java | 29 +- .../metadata/MetadataSegmentManagerTest.java | 12 +- .../metadata/SQLMetadataRuleManagerTest.java | 8 +- .../SQLMetadataStorageActionHandlerTest.java | 20 +- .../LocatedSegmentDescriptorSerdeTest.java | 4 +- .../CombiningFirehoseFactoryTest.java | 4 +- .../segment/indexing/DataSchemaTest.java | 12 +- .../granularity/ArbitraryGranularityTest.java | 85 +- .../granularity/UniformGranularityTest.java | 117 +-- .../loading/LocalDataSegmentFinderTest.java | 12 +- .../loading/LocalDataSegmentKillerTest.java | 4 +- .../loading/LocalDataSegmentPusherTest.java | 20 +- .../SegmentLoaderLocalCacheManagerTest.java | 4 +- .../segment/loading/StorageLocationTest.java | 4 +- .../segment/realtime/RealtimeManagerTest.java | 24 +- .../AppenderatorDriverFailTest.java | 8 +- .../appenderator/AppenderatorDriverTest.java | 28 +- .../appenderator/AppenderatorTest.java | 44 +- .../realtime/appenderator/CommittedTest.java | 8 +- ...DefaultOfflineAppenderatorFactoryTest.java | 4 +- .../appenderator/SegmentIdentifierTest.java | 3 +- .../firehose/EventReceiverFirehoseTest.java | 6 +- .../firehose/IngestSegmentFirehoseTest.java | 10 +- ...inatorBasedSegmentHandoffNotifierTest.java | 19 +- .../IntervalStartVersioningPolicyTest.java | 4 +- ...MessageTimeRejectionPolicyFactoryTest.java | 3 +- .../plumber/RealtimePlumberSchoolTest.java | 26 +- .../ServerTimeRejectionPolicyFactoryTest.java | 3 +- .../segment/realtime/plumber/SinkTest.java | 18 +- .../druid/server/ClientInfoResourceTest.java | 13 +- .../io/druid/server/SegmentManagerTest.java | 21 +- .../server/audit/SQLAuditManagerTest.java | 24 +- .../SegmentChangeRequestDropTest.java | 3 +- .../SegmentChangeRequestLoadTest.java | 3 +- .../coordination/ServerManagerTest.java | 101 +-- .../coordination/ZkCoordinatorTest.java | 41 +- .../BatchDataSegmentAnnouncerTest.java | 8 +- .../CostBalancerStrategyBenchmark.java | 5 +- .../coordinator/CostBalancerStrategyTest.java | 8 +- ...iskNormalizedCostBalancerStrategyTest.java | 3 +- .../server/coordinator/DruidClusterTest.java | 6 +- .../DruidCoordinatorBalancerProfiler.java | 10 +- .../DruidCoordinatorBalancerTest.java | 9 +- .../DruidCoordinatorRuleRunnerTest.java | 90 +- .../DruidCoordinatorSegmentMergerTest.java | 214 ++--- .../coordinator/DruidCoordinatorTest.java | 19 +- .../server/coordinator/LoadQueuePeonTest.java | 4 +- .../ReservoirSegmentSamplerTest.java | 7 +- .../server/coordinator/ServerHolderTest.java | 6 +- ...uidCoordinatorCleanupOvershadowedTest.java | 3 +- .../DruidCoordinatorSegmentKillerTest.java | 37 +- .../BroadcastDistributionRuleSerdeTest.java | 8 +- .../rules/BroadcastDistributionRuleTest.java | 22 +- .../rules/IntervalLoadRuleTest.java | 6 +- .../coordinator/rules/LoadRuleTest.java | 16 +- .../coordinator/rules/PeriodDropRuleTest.java | 7 +- .../coordinator/rules/PeriodLoadRuleTest.java | 14 +- .../server/http/DatasourcesResourceTest.java | 15 +- .../server/http/IntervalsResourceTest.java | 15 
+- .../druid/server/http/RulesResourceTest.java | 23 +- .../server/http/ServersResourceTest.java | 4 +- .../ListenerResourceAnnouncerTest.java | 5 +- .../server/log/FileRequestLoggerTest.java | 3 +- .../server/log/LoggingRequestLoggerTest.java | 3 +- .../metrics/HistoricalMetricsMonitorTest.java | 4 +- .../server/router/QueryHostFinderTest.java | 4 +- .../router/TieredBrokerHostSelectorTest.java | 37 +- .../server/shard/NumberedShardSpecTest.java | 3 +- .../HashBasedNumberedShardSpecTest.java | 5 +- services/pom.xml | 21 + .../java/io/druid/sql/avatica/DruidMeta.java | 4 +- .../ApproxCountDistinctSqlAggregator.java | 3 +- .../aggregation/DimensionExpression.java | 3 +- .../expression/CeilOperatorConversion.java | 3 +- .../calcite/expression/DruidExpression.java | 3 +- .../sql/calcite/expression/Expressions.java | 13 +- .../expression/FloorOperatorConversion.java | 3 +- .../expression/LookupOperatorConversion.java | 3 +- .../RegexpExtractOperatorConversion.java | 3 +- .../calcite/expression/SimpleExtraction.java | 3 +- .../SubstringOperatorConversion.java | 3 +- .../TimeArithmeticOperatorConversion.java | 5 +- .../TimeExtractOperatorConversion.java | 5 +- .../sql/calcite/filtration/Filtration.java | 7 +- .../sql/calcite/filtration/RangeSets.java | 3 +- .../druid/sql/calcite/planner/Calcites.java | 8 +- .../sql/calcite/planner/DruidRexExecutor.java | 4 +- .../druid/sql/calcite/rel/DruidSemiJoin.java | 2 +- .../io/druid/sql/calcite/rel/QueryMaker.java | 45 +- .../druid/sql/calcite/rule/GroupByRules.java | 5 +- .../druid/sql/calcite/schema/DruidSchema.java | 5 +- .../sql/avatica/DruidAvaticaHandlerTest.java | 5 +- .../druid/sql/avatica/DruidStatementTest.java | 26 +- .../druid/sql/calcite/CalciteQueryTest.java | 69 +- .../calcite/expression/ExpressionsTest.java | 39 +- .../calcite/filtration/FiltrationTest.java | 6 +- .../sql/calcite/schema/DruidSchemaTest.java | 6 +- .../druid/sql/calcite/util/CalciteTests.java | 3 +- 448 files changed, 4231 insertions(+), 4104 deletions(-) create mode 100644 codestyle/joda-time-forbidden-apis.txt create mode 100644 java-util/src/main/java/io/druid/java/util/common/DateTimes.java rename indexing-hadoop/src/main/java/io/druid/indexer/StringIntervalFunction.java => java-util/src/main/java/io/druid/java/util/common/Intervals.java (57%) rename {common/src/main/java/io/druid/common/utils => java-util/src/main/java/io/druid/java/util/common}/JodaUtils.java (97%) diff --git a/api/src/main/java/io/druid/data/input/MapBasedInputRow.java b/api/src/main/java/io/druid/data/input/MapBasedInputRow.java index 61fe512e2fc..4d83b265955 100644 --- a/api/src/main/java/io/druid/data/input/MapBasedInputRow.java +++ b/api/src/main/java/io/druid/data/input/MapBasedInputRow.java @@ -19,6 +19,7 @@ package io.druid.data.input; +import io.druid.java.util.common.DateTimes; import org.joda.time.DateTime; import java.util.List; @@ -60,7 +61,7 @@ public class MapBasedInputRow extends MapBasedRow implements InputRow public String toString() { return "MapBasedInputRow{" + - "timestamp=" + new DateTime(getTimestampFromEpoch()) + + "timestamp=" + DateTimes.utc(getTimestampFromEpoch()) + ", event=" + getEvent() + ", dimensions=" + dimensions + '}'; diff --git a/api/src/main/java/io/druid/data/input/MapBasedRow.java b/api/src/main/java/io/druid/data/input/MapBasedRow.java index 0d4cbc8be8e..ff80d65321d 100644 --- a/api/src/main/java/io/druid/data/input/MapBasedRow.java +++ b/api/src/main/java/io/druid/data/input/MapBasedRow.java @@ -22,6 +22,7 @@ package io.druid.data.input; import 
com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.collect.Lists; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.parsers.ParseException; import org.joda.time.DateTime; @@ -54,7 +55,7 @@ public class MapBasedRow implements Row Map event ) { - this(new DateTime(timestamp), event); + this(DateTimes.utc(timestamp), event); } @Override diff --git a/api/src/main/java/io/druid/timeline/DataSegmentUtils.java b/api/src/main/java/io/druid/timeline/DataSegmentUtils.java index 4d8690e9576..eced6598d83 100644 --- a/api/src/main/java/io/druid/timeline/DataSegmentUtils.java +++ b/api/src/main/java/io/druid/timeline/DataSegmentUtils.java @@ -91,7 +91,7 @@ public class DataSegmentUtils return new SegmentIdentifierParts( dataSource, - new Interval(start.getMillis(), end.getMillis()), + new Interval(start, end), version, trail ); diff --git a/api/src/test/java/io/druid/TestObjectMapper.java b/api/src/test/java/io/druid/TestObjectMapper.java index 740176fb0c5..8ce6fedf843 100644 --- a/api/src/test/java/io/druid/TestObjectMapper.java +++ b/api/src/test/java/io/druid/TestObjectMapper.java @@ -29,6 +29,7 @@ import com.fasterxml.jackson.databind.SerializationFeature; import com.fasterxml.jackson.databind.deser.std.StdDeserializer; import com.fasterxml.jackson.databind.module.SimpleModule; import com.fasterxml.jackson.databind.ser.std.ToStringSerializer; +import io.druid.java.util.common.Intervals; import org.joda.time.Interval; import java.io.IOException; @@ -61,7 +62,7 @@ public class TestObjectMapper extends ObjectMapper JsonParser jsonParser, DeserializationContext deserializationContext ) throws IOException, JsonProcessingException { - return new Interval(jsonParser.getText()); + return Intervals.of(jsonParser.getText()); } } ); diff --git a/api/src/test/java/io/druid/data/input/MapBasedRowTest.java b/api/src/test/java/io/druid/data/input/MapBasedRowTest.java index 8d98e11ddcf..05241f50e05 100644 --- a/api/src/test/java/io/druid/data/input/MapBasedRowTest.java +++ b/api/src/test/java/io/druid/data/input/MapBasedRowTest.java @@ -19,19 +19,18 @@ package io.druid.data.input; -import org.joda.time.DateTime; +import com.google.common.collect.ImmutableMap; +import io.druid.java.util.common.DateTimes; import org.junit.Assert; import org.junit.Test; -import com.google.common.collect.ImmutableMap; - public class MapBasedRowTest { @Test public void testGetLongMetricFromString() { MapBasedRow row = new MapBasedRow( - new DateTime(), + DateTimes.nowUtc(), ImmutableMap.builder() .put("k0", "-1.2") .put("k1", "1.23") diff --git a/api/src/test/java/io/druid/data/input/impl/InputRowParserSerdeTest.java b/api/src/test/java/io/druid/data/input/impl/InputRowParserSerdeTest.java index 13164db3f9a..144c14418d7 100644 --- a/api/src/test/java/io/druid/data/input/impl/InputRowParserSerdeTest.java +++ b/api/src/test/java/io/druid/data/input/impl/InputRowParserSerdeTest.java @@ -27,8 +27,8 @@ import com.google.common.collect.Lists; import io.druid.TestObjectMapper; import io.druid.data.input.ByteBufferInputRowParser; import io.druid.data.input.InputRow; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.StringUtils; -import org.joda.time.DateTime; import org.junit.Assert; import org.junit.Test; @@ -63,7 +63,7 @@ public class InputRowParserSerdeTest Assert.assertEquals(ImmutableList.of("foo", "bar"), parsed.getDimensions()); Assert.assertEquals(ImmutableList.of("x"), parsed.getDimension("foo")); 
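The assertion rewrites just below swap new DateTime("2000").getMillis() for DateTimes.of("2000").getMillis(), so the expected epoch value no longer depends on where the test runs. The snippet below illustrates the difference with concrete numbers; it is a hypothetical check, not test code from the patch.

import io.druid.java.util.common.DateTimes;
import org.joda.time.DateTime;

class TimestampStabilityExample
{
  static void example()
  {
    // Pinned to UTC: 946684800000 on every host.
    long stable = DateTimes.of("2000").getMillis();

    // Uses the JVM default zone: for example 946706400000 on an America/Chicago host,
    // so an expected value computed this way only matches a UTC-pinned parser by luck.
    long hostDependent = new DateTime("2000").getMillis();
  }
}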
Assert.assertEquals(ImmutableList.of("y"), parsed.getDimension("bar")); - Assert.assertEquals(new DateTime("2000").getMillis(), parsed.getTimestampFromEpoch()); + Assert.assertEquals(DateTimes.of("2000").getMillis(), parsed.getTimestampFromEpoch()); } @Test @@ -79,7 +79,7 @@ public class InputRowParserSerdeTest Assert.assertEquals(ImmutableList.of("foo", "bar"), parsed.getDimensions()); Assert.assertEquals(ImmutableList.of("x"), parsed.getDimension("foo")); Assert.assertEquals(ImmutableList.of("y"), parsed.getDimension("bar")); - Assert.assertEquals(new DateTime("3000").getMillis(), parsed.getTimestampFromEpoch()); + Assert.assertEquals(DateTimes.of("3000").getMillis(), parsed.getTimestampFromEpoch()); } } @@ -219,7 +219,7 @@ public class InputRowParserSerdeTest Assert.assertEquals(ImmutableList.of("asdf"), parsed.getDimension("hey0barx")); Assert.assertEquals(ImmutableList.of("456"), parsed.getDimension("metA")); Assert.assertEquals(ImmutableList.of("5"), parsed.getDimension("newmet")); - Assert.assertEquals(new DateTime("2999").getMillis(), parsed.getTimestampFromEpoch()); + Assert.assertEquals(DateTimes.of("2999").getMillis(), parsed.getTimestampFromEpoch()); String testSpec = "{\"enabled\": true,\"useFieldDiscovery\": true, \"fields\": [\"parseThisRootField\"]}"; final JSONPathSpec parsedSpec = jsonMapper.readValue(testSpec, JSONPathSpec.class); diff --git a/api/src/test/java/io/druid/data/input/impl/PrefetchableTextFilesFirehoseFactoryTest.java b/api/src/test/java/io/druid/data/input/impl/PrefetchableTextFilesFirehoseFactoryTest.java index 1af0ff4968c..d41164effc1 100644 --- a/api/src/test/java/io/druid/data/input/impl/PrefetchableTextFilesFirehoseFactoryTest.java +++ b/api/src/test/java/io/druid/data/input/impl/PrefetchableTextFilesFirehoseFactoryTest.java @@ -24,10 +24,10 @@ import com.google.common.base.Preconditions; import com.google.common.collect.Lists; import io.druid.data.input.Firehose; import io.druid.data.input.Row; +import io.druid.java.util.common.DateTimes; import org.apache.commons.io.FileUtils; import org.apache.commons.io.filefilter.TrueFileFilter; import org.hamcrest.CoreMatchers; -import org.joda.time.DateTime; import org.junit.AfterClass; import org.junit.Assert; import org.junit.BeforeClass; @@ -124,7 +124,7 @@ public class PrefetchableTextFilesFirehoseFactoryTest for (int i = 0; i < 10; i++) { for (int j = 0; j < 100; j++) { final Row row = rows.get(i * 100 + j); - Assert.assertEquals(new DateTime(20171220 + i), row.getTimestamp()); + Assert.assertEquals(DateTimes.utc(20171220 + i), row.getTimestamp()); Assert.assertEquals(String.valueOf(i), row.getDimension("a").get(0)); Assert.assertEquals(String.valueOf(j), row.getDimension("b").get(0)); } diff --git a/api/src/test/java/io/druid/data/input/impl/TimestampSpecTest.java b/api/src/test/java/io/druid/data/input/impl/TimestampSpecTest.java index 0357ae9ada1..52667a098b5 100644 --- a/api/src/test/java/io/druid/data/input/impl/TimestampSpecTest.java +++ b/api/src/test/java/io/druid/data/input/impl/TimestampSpecTest.java @@ -20,6 +20,7 @@ package io.druid.data.input.impl; import com.google.common.collect.ImmutableMap; +import io.druid.java.util.common.DateTimes; import org.joda.time.DateTime; import org.joda.time.format.ISODateTimeFormat; import org.junit.Assert; @@ -32,7 +33,7 @@ public class TimestampSpecTest { TimestampSpec spec = new TimestampSpec("TIMEstamp", "yyyy-MM-dd", null); Assert.assertEquals( - new DateTime("2014-03-01"), + DateTimes.of("2014-03-01"), spec.extractTimestamp(ImmutableMap.of("TIMEstamp", 
"2014-03-01")) ); } @@ -40,9 +41,9 @@ public class TimestampSpecTest @Test public void testExtractTimestampWithMissingTimestampColumn() throws Exception { - TimestampSpec spec = new TimestampSpec(null, null, new DateTime(0)); + TimestampSpec spec = new TimestampSpec(null, null, DateTimes.EPOCH); Assert.assertEquals( - new DateTime("1970-01-01"), + DateTimes.of("1970-01-01"), spec.extractTimestamp(ImmutableMap.of("dim", "foo")) ); } diff --git a/api/src/test/java/io/druid/timeline/DataSegmentTest.java b/api/src/test/java/io/druid/timeline/DataSegmentTest.java index 5488e97d965..50582599cc4 100644 --- a/api/src/test/java/io/druid/timeline/DataSegmentTest.java +++ b/api/src/test/java/io/druid/timeline/DataSegmentTest.java @@ -28,11 +28,12 @@ import com.google.common.collect.Range; import com.google.common.collect.Sets; import io.druid.TestObjectMapper; import io.druid.data.input.InputRow; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.timeline.partition.NoneShardSpec; import io.druid.timeline.partition.PartitionChunk; import io.druid.timeline.partition.ShardSpec; import io.druid.timeline.partition.ShardSpecLookup; -import org.joda.time.DateTime; import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; @@ -90,7 +91,7 @@ public class DataSegmentTest public void testV1Serialization() throws Exception { - final Interval interval = new Interval("2011-10-01/2011-10-02"); + final Interval interval = Intervals.of("2011-10-01/2011-10-02"); final ImmutableMap loadSpec = ImmutableMap.of("something", "or_other"); DataSegment segment = new DataSegment( @@ -150,8 +151,8 @@ public class DataSegmentTest { final DataSegment segment = DataSegment.builder() .dataSource("foo") - .interval(new Interval("2012-01-01/2012-01-02")) - .version(new DateTime("2012-01-01T11:22:33.444Z").toString()) + .interval(Intervals.of("2012-01-01/2012-01-02")) + .version(DateTimes.of("2012-01-01T11:22:33.444Z").toString()) .shardSpec(NoneShardSpec.instance()) .build(); @@ -166,8 +167,8 @@ public class DataSegmentTest { final DataSegment segment = DataSegment.builder() .dataSource("foo") - .interval(new Interval("2012-01-01/2012-01-02")) - .version(new DateTime("2012-01-01T11:22:33.444Z").toString()) + .interval(Intervals.of("2012-01-01/2012-01-02")) + .version(DateTimes.of("2012-01-01T11:22:33.444Z").toString()) .shardSpec(getShardSpec(0)) .build(); @@ -182,8 +183,8 @@ public class DataSegmentTest { final DataSegment segment = DataSegment.builder() .dataSource("foo") - .interval(new Interval("2012-01-01/2012-01-02")) - .version(new DateTime("2012-01-01T11:22:33.444Z").toString()) + .interval(Intervals.of("2012-01-01/2012-01-02")) + .version(DateTimes.of("2012-01-01T11:22:33.444Z").toString()) .shardSpec(getShardSpec(7)) .build(); @@ -198,8 +199,8 @@ public class DataSegmentTest { final DataSegment segment = DataSegment.builder() .dataSource("foo") - .interval(new Interval("2012-01-01/2012-01-02")) - .version(new DateTime("2012-01-01T11:22:33.444Z").toString()) + .interval(Intervals.of("2012-01-01/2012-01-02")) + .version(DateTimes.of("2012-01-01T11:22:33.444Z").toString()) .build(); final DataSegment segment2 = mapper.readValue(mapper.writeValueAsString(segment), DataSegment.class); @@ -240,7 +241,7 @@ public class DataSegmentTest { return DataSegment.builder() .dataSource(dataSource) - .interval(new Interval(interval)) + .interval(Intervals.of(interval)) .version(version) .size(1) .build(); diff --git 
a/api/src/test/java/io/druid/timeline/DataSegmentUtilsTest.java b/api/src/test/java/io/druid/timeline/DataSegmentUtilsTest.java index 5ae9d1dae69..a58fe0b6f6c 100644 --- a/api/src/test/java/io/druid/timeline/DataSegmentUtilsTest.java +++ b/api/src/test/java/io/druid/timeline/DataSegmentUtilsTest.java @@ -19,8 +19,8 @@ package io.druid.timeline; +import io.druid.java.util.common.Intervals; import io.druid.timeline.DataSegmentUtils.SegmentIdentifierParts; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; @@ -32,19 +32,19 @@ public class DataSegmentUtilsTest public void testBasic() { String datasource = "datasource"; - SegmentIdentifierParts desc = new SegmentIdentifierParts(datasource, new Interval("2015-01-02/2015-01-03"), "ver", "0_0"); + SegmentIdentifierParts desc = new SegmentIdentifierParts(datasource, Intervals.of("2015-01-02/2015-01-03"), "ver", "0_0"); Assert.assertEquals("datasource_2015-01-02T00:00:00.000Z_2015-01-03T00:00:00.000Z_ver_0_0", desc.toString()); Assert.assertEquals(desc, DataSegmentUtils.valueOf(datasource, desc.toString())); - desc = desc.withInterval(new Interval("2014-10-20T00:00:00Z/P1D")); + desc = desc.withInterval(Intervals.of("2014-10-20T00:00:00Z/P1D")); Assert.assertEquals("datasource_2014-10-20T00:00:00.000Z_2014-10-21T00:00:00.000Z_ver_0_0", desc.toString()); Assert.assertEquals(desc, DataSegmentUtils.valueOf(datasource, desc.toString())); - desc = new SegmentIdentifierParts(datasource, new Interval("2015-01-02/2015-01-03"), "ver", null); + desc = new SegmentIdentifierParts(datasource, Intervals.of("2015-01-02/2015-01-03"), "ver", null); Assert.assertEquals("datasource_2015-01-02T00:00:00.000Z_2015-01-03T00:00:00.000Z_ver", desc.toString()); Assert.assertEquals(desc, DataSegmentUtils.valueOf(datasource, desc.toString())); - desc = desc.withInterval(new Interval("2014-10-20T00:00:00Z/P1D")); + desc = desc.withInterval(Intervals.of("2014-10-20T00:00:00Z/P1D")); Assert.assertEquals("datasource_2014-10-20T00:00:00.000Z_2014-10-21T00:00:00.000Z_ver", desc.toString()); Assert.assertEquals(desc, DataSegmentUtils.valueOf(datasource, desc.toString())); } @@ -53,19 +53,19 @@ public class DataSegmentUtilsTest public void testDataSourceWithUnderscore1() { String datasource = "datasource_1"; - SegmentIdentifierParts desc = new SegmentIdentifierParts(datasource, new Interval("2015-01-02/2015-01-03"), "ver", "0_0"); + SegmentIdentifierParts desc = new SegmentIdentifierParts(datasource, Intervals.of("2015-01-02/2015-01-03"), "ver", "0_0"); Assert.assertEquals("datasource_1_2015-01-02T00:00:00.000Z_2015-01-03T00:00:00.000Z_ver_0_0", desc.toString()); Assert.assertEquals(desc, DataSegmentUtils.valueOf(datasource, desc.toString())); - desc = desc.withInterval(new Interval("2014-10-20T00:00:00Z/P1D")); + desc = desc.withInterval(Intervals.of("2014-10-20T00:00:00Z/P1D")); Assert.assertEquals("datasource_1_2014-10-20T00:00:00.000Z_2014-10-21T00:00:00.000Z_ver_0_0", desc.toString()); Assert.assertEquals(desc, DataSegmentUtils.valueOf(datasource, desc.toString())); - desc = new SegmentIdentifierParts(datasource, new Interval("2015-01-02/2015-01-03"), "ver", null); + desc = new SegmentIdentifierParts(datasource, Intervals.of("2015-01-02/2015-01-03"), "ver", null); Assert.assertEquals("datasource_1_2015-01-02T00:00:00.000Z_2015-01-03T00:00:00.000Z_ver", desc.toString()); Assert.assertEquals(desc, DataSegmentUtils.valueOf(datasource, desc.toString())); - desc = desc.withInterval(new Interval("2014-10-20T00:00:00Z/P1D")); + desc = 
desc.withInterval(Intervals.of("2014-10-20T00:00:00Z/P1D")); Assert.assertEquals("datasource_1_2014-10-20T00:00:00.000Z_2014-10-21T00:00:00.000Z_ver", desc.toString()); Assert.assertEquals(desc, DataSegmentUtils.valueOf(datasource, desc.toString())); } @@ -74,28 +74,28 @@ public class DataSegmentUtilsTest public void testDataSourceWithUnderscore2() { String dataSource = "datasource_2015-01-01T00:00:00.000Z"; - SegmentIdentifierParts desc = new SegmentIdentifierParts(dataSource, new Interval("2015-01-02/2015-01-03"), "ver", "0_0"); + SegmentIdentifierParts desc = new SegmentIdentifierParts(dataSource, Intervals.of("2015-01-02/2015-01-03"), "ver", "0_0"); Assert.assertEquals( "datasource_2015-01-01T00:00:00.000Z_2015-01-02T00:00:00.000Z_2015-01-03T00:00:00.000Z_ver_0_0", desc.toString() ); Assert.assertEquals(desc, DataSegmentUtils.valueOf(dataSource, desc.toString())); - desc = desc.withInterval(new Interval("2014-10-20T00:00:00Z/P1D")); + desc = desc.withInterval(Intervals.of("2014-10-20T00:00:00Z/P1D")); Assert.assertEquals( "datasource_2015-01-01T00:00:00.000Z_2014-10-20T00:00:00.000Z_2014-10-21T00:00:00.000Z_ver_0_0", desc.toString() ); Assert.assertEquals(desc, DataSegmentUtils.valueOf(dataSource, desc.toString())); - desc = new SegmentIdentifierParts(dataSource, new Interval("2015-01-02/2015-01-03"), "ver", null); + desc = new SegmentIdentifierParts(dataSource, Intervals.of("2015-01-02/2015-01-03"), "ver", null); Assert.assertEquals( "datasource_2015-01-01T00:00:00.000Z_2015-01-02T00:00:00.000Z_2015-01-03T00:00:00.000Z_ver", desc.toString() ); Assert.assertEquals(desc, DataSegmentUtils.valueOf(dataSource, desc.toString())); - desc = desc.withInterval(new Interval("2014-10-20T00:00:00Z/P1D")); + desc = desc.withInterval(Intervals.of("2014-10-20T00:00:00Z/P1D")); Assert.assertEquals( "datasource_2015-01-01T00:00:00.000Z_2014-10-20T00:00:00.000Z_2014-10-21T00:00:00.000Z_ver", desc.toString() diff --git a/benchmarks/src/main/java/io/druid/benchmark/ExpressionBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/ExpressionBenchmark.java index a8ce3d2837b..ecc32d3fec6 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/ExpressionBenchmark.java +++ b/benchmarks/src/main/java/io/druid/benchmark/ExpressionBenchmark.java @@ -24,6 +24,7 @@ import com.google.common.collect.Iterables; import io.druid.benchmark.datagen.BenchmarkColumnSchema; import io.druid.benchmark.datagen.BenchmarkSchemaInfo; import io.druid.benchmark.datagen.SegmentGenerator; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; @@ -41,7 +42,6 @@ import io.druid.segment.VirtualColumns; import io.druid.segment.column.ValueType; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.LinearShardSpec; -import org.joda.time.Interval; import org.openjdk.jmh.annotations.Benchmark; import org.openjdk.jmh.annotations.BenchmarkMode; import org.openjdk.jmh.annotations.Fork; @@ -89,7 +89,7 @@ public class ExpressionBenchmark BenchmarkColumnSchema.makeNormal("y", ValueType.FLOAT, false, 1, 0d, 0d, 10000d, false) ), ImmutableList.of(), - new Interval("2000/P1D"), + Intervals.of("2000/P1D"), false ); diff --git a/benchmarks/src/main/java/io/druid/benchmark/TimeParseBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/TimeParseBenchmark.java index f18cf2c084a..ca700b87e64 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/TimeParseBenchmark.java +++ 
b/benchmarks/src/main/java/io/druid/benchmark/TimeParseBenchmark.java @@ -39,6 +39,7 @@ import org.openjdk.jmh.runner.options.OptionsBuilder; import java.text.SimpleDateFormat; import java.util.Date; +import java.util.Locale; import java.util.concurrent.TimeUnit; @State(Scope.Benchmark) @@ -60,7 +61,7 @@ public class TimeParseBenchmark @Setup public void setup() { - SimpleDateFormat format = new SimpleDateFormat(DATA_FORMAT); + SimpleDateFormat format = new SimpleDateFormat(DATA_FORMAT, Locale.ENGLISH); long start = System.currentTimeMillis(); int rowsPerBatch = numRows / numBatches; int numRowInBatch = 0; diff --git a/benchmarks/src/main/java/io/druid/benchmark/datagen/BenchmarkSchemas.java b/benchmarks/src/main/java/io/druid/benchmark/datagen/BenchmarkSchemas.java index f8b5da8dcc9..3f9a1dc9ab2 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/datagen/BenchmarkSchemas.java +++ b/benchmarks/src/main/java/io/druid/benchmark/datagen/BenchmarkSchemas.java @@ -20,6 +20,7 @@ package io.druid.benchmark.datagen; import com.google.common.collect.ImmutableList; +import io.druid.java.util.common.Intervals; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.CountAggregatorFactory; import io.druid.query.aggregation.DoubleMinAggregatorFactory; @@ -84,7 +85,7 @@ public class BenchmarkSchemas basicSchemaIngestAggs.add(new DoubleMinAggregatorFactory("minFloatZipf", "metFloatZipf")); basicSchemaIngestAggs.add(new HyperUniquesAggregatorFactory("hyper", "dimHyperUnique")); - Interval basicSchemaDataInterval = new Interval(0, 1000000); + Interval basicSchemaDataInterval = Intervals.utc(0, 1000000); BenchmarkSchemaInfo basicSchema = new BenchmarkSchemaInfo( basicSchemaColumns, @@ -104,7 +105,7 @@ public class BenchmarkSchemas List basicSchemaIngestAggs = new ArrayList<>(); basicSchemaIngestAggs.add(new CountAggregatorFactory("rows")); - Interval basicSchemaDataInterval = new Interval(0, 1000000); + Interval basicSchemaDataInterval = Intervals.utc(0, 1000000); BenchmarkSchemaInfo basicSchema = new BenchmarkSchemaInfo( basicSchemaColumns, @@ -125,7 +126,7 @@ public class BenchmarkSchemas basicSchemaIngestAggs.add(new LongSumAggregatorFactory("dimSequential", "dimSequential")); basicSchemaIngestAggs.add(new CountAggregatorFactory("rows")); - Interval basicSchemaDataInterval = new Interval(0, 1000000); + Interval basicSchemaDataInterval = Intervals.utc(0, 1000000); BenchmarkSchemaInfo basicSchema = new BenchmarkSchemaInfo( basicSchemaColumns, @@ -146,7 +147,7 @@ public class BenchmarkSchemas basicSchemaIngestAggs.add(new DoubleSumAggregatorFactory("dimSequential", "dimSequential")); basicSchemaIngestAggs.add(new CountAggregatorFactory("rows")); - Interval basicSchemaDataInterval = new Interval(0, 1000000); + Interval basicSchemaDataInterval = Intervals.utc(0, 1000000); BenchmarkSchemaInfo basicSchema = new BenchmarkSchemaInfo( basicSchemaColumns, diff --git a/benchmarks/src/main/java/io/druid/benchmark/query/SqlBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/query/SqlBenchmark.java index a54cdf3d44b..05e2d75336c 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/query/SqlBenchmark.java +++ b/benchmarks/src/main/java/io/druid/benchmark/query/SqlBenchmark.java @@ -25,7 +25,7 @@ import com.google.common.io.Files; import io.druid.benchmark.datagen.BenchmarkSchemaInfo; import io.druid.benchmark.datagen.BenchmarkSchemas; import io.druid.benchmark.datagen.SegmentGenerator; -import io.druid.common.utils.JodaUtils; +import io.druid.java.util.common.Intervals; 
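The SqlBenchmark hunk continuing below replaces the hand-built interval over JodaUtils.MIN_INSTANT and MAX_INSTANT with the Intervals.ETERNITY constant. Assuming ETERNITY is defined over the same bounds with a UTC chronology (its source is not part of this excerpt), the two describe the same interval; the reworked JodaUtilsTest later in the patch asserts its duration is Long.MAX_VALUE milliseconds. A hedged equivalence sketch:

import io.druid.java.util.common.Intervals;
import io.druid.java.util.common.JodaUtils;
import org.joda.time.Interval;
import org.joda.time.chrono.ISOChronology;

class EternityExample
{
  static void example()
  {
    Interval spelledOut =
        new Interval(JodaUtils.MIN_INSTANT, JodaUtils.MAX_INSTANT, ISOChronology.getInstanceUTC());

    // Same instants and same chronology, so the constant can stand in for the old expression.
    boolean sameInterval = Intervals.ETERNITY.equals(spelledOut);

    // Long.MAX_VALUE, as asserted by JodaUtilsTest.testMinMaxInterval() in this patch.
    long millis = Intervals.ETERNITY.toDuration().getMillis();
  }
}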
import io.druid.data.input.Row; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Sequence; @@ -48,7 +48,6 @@ import io.druid.sql.calcite.util.SpecificSegmentsQuerySegmentWalker; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.LinearShardSpec; import org.apache.commons.io.FileUtils; -import org.joda.time.Interval; import org.openjdk.jmh.annotations.Benchmark; import org.openjdk.jmh.annotations.BenchmarkMode; import org.openjdk.jmh.annotations.Fork; @@ -124,7 +123,7 @@ public class SqlBenchmark groupByQuery = GroupByQuery .builder() .setDataSource("foo") - .setInterval(new Interval(JodaUtils.MIN_INSTANT, JodaUtils.MAX_INSTANT)) + .setInterval(Intervals.ETERNITY) .setDimensions( Arrays.asList( new DefaultDimensionSpec("dimZipf", "d0"), diff --git a/benchmarks/src/main/java/io/druid/benchmark/query/TimeseriesBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/query/TimeseriesBenchmark.java index 028334c5997..d428352738e 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/query/TimeseriesBenchmark.java +++ b/benchmarks/src/main/java/io/druid/benchmark/query/TimeseriesBenchmark.java @@ -30,6 +30,7 @@ import io.druid.concurrent.Execs; import io.druid.data.input.InputRow; import io.druid.hll.HyperLogLogHash; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; @@ -72,7 +73,6 @@ import io.druid.segment.column.ColumnConfig; import io.druid.segment.incremental.IncrementalIndex; import io.druid.segment.serde.ComplexMetrics; import org.apache.commons.io.FileUtils; -import org.joda.time.Interval; import org.openjdk.jmh.annotations.Benchmark; import org.openjdk.jmh.annotations.BenchmarkMode; import org.openjdk.jmh.annotations.Fork; @@ -214,7 +214,7 @@ public class TimeseriesBenchmark basicQueries.put("timeFilterAlphanumeric", timeFilterQuery); } { - QuerySegmentSpec intervalSpec = new MultipleIntervalSegmentSpec(Collections.singletonList(new Interval(200000, 300000))); + QuerySegmentSpec intervalSpec = new MultipleIntervalSegmentSpec(Collections.singletonList(Intervals.utc(200000, 300000))); List queryAggs = new ArrayList<>(); LongSumAggregatorFactory lsaf = new LongSumAggregatorFactory("sumLongSequential", "sumLongSequential"); queryAggs.add(lsaf); diff --git a/benchmarks/src/main/java/io/druid/server/coordinator/CostBalancerStrategyBenchmark.java b/benchmarks/src/main/java/io/druid/server/coordinator/CostBalancerStrategyBenchmark.java index ef4eca1ca57..4f397ec882c 100644 --- a/benchmarks/src/main/java/io/druid/server/coordinator/CostBalancerStrategyBenchmark.java +++ b/benchmarks/src/main/java/io/druid/server/coordinator/CostBalancerStrategyBenchmark.java @@ -19,6 +19,7 @@ package io.druid.server.coordinator; +import io.druid.java.util.common.DateTimes; import io.druid.timeline.DataSegment; import org.joda.time.DateTime; import org.joda.time.Interval; @@ -39,7 +40,7 @@ import java.util.concurrent.TimeUnit; @State(Scope.Benchmark) public class CostBalancerStrategyBenchmark { - private final static DateTime t0 = new DateTime("2016-01-01T01:00:00Z"); + private final static DateTime t0 = DateTimes.of("2016-01-01T01:00:00Z"); private List segments; private DataSegment segment; diff --git a/benchmarks/src/test/java/io/druid/benchmark/BenchmarkDataGeneratorTest.java 
b/benchmarks/src/test/java/io/druid/benchmark/BenchmarkDataGeneratorTest.java index dd6f271f066..593dd76a76b 100644 --- a/benchmarks/src/test/java/io/druid/benchmark/BenchmarkDataGeneratorTest.java +++ b/benchmarks/src/test/java/io/druid/benchmark/BenchmarkDataGeneratorTest.java @@ -22,8 +22,8 @@ package io.druid.benchmark; import io.druid.benchmark.datagen.BenchmarkColumnSchema; import io.druid.benchmark.datagen.BenchmarkDataGenerator; import io.druid.data.input.InputRow; +import io.druid.java.util.common.Intervals; import io.druid.segment.column.ValueType; -import org.joda.time.Interval; import org.junit.Test; import java.util.ArrayList; @@ -378,13 +378,13 @@ public class BenchmarkDataGeneratorTest ) ); - BenchmarkDataGenerator dataGenerator = new BenchmarkDataGenerator(schemas, 9999, new Interval(50000, 600000), 100); + BenchmarkDataGenerator dataGenerator = new BenchmarkDataGenerator(schemas, 9999, Intervals.utc(50000, 600000), 100); for (int i = 0; i < 100; i++) { InputRow row = dataGenerator.nextRow(); //System.out.println("S-ROW: " + row); } - BenchmarkDataGenerator dataGenerator2 = new BenchmarkDataGenerator(schemas, 9999, new Interval(50000, 50001), 100); + BenchmarkDataGenerator dataGenerator2 = new BenchmarkDataGenerator(schemas, 9999, Intervals.utc(50000, 50001), 100); for (int i = 0; i < 100; i++) { InputRow row = dataGenerator2.nextRow(); //System.out.println("S2-ROW: " + row); diff --git a/codestyle/joda-time-forbidden-apis.txt b/codestyle/joda-time-forbidden-apis.txt new file mode 100644 index 00000000000..76b1fe59f42 --- /dev/null +++ b/codestyle/joda-time-forbidden-apis.txt @@ -0,0 +1,46 @@ +@defaultMessage Uses default time zone +org.joda.time.DateTime#() +org.joda.time.DateTime#(long) +org.joda.time.DateTime#(java.lang.Object) +org.joda.time.DateTime#(int, int, int, int, int) +org.joda.time.DateTime#(int, int, int, int, int, int) +org.joda.time.DateTime#(int, int, int, int, int, int, int) +org.joda.time.DateTime#now() +org.joda.time.Instant#toDateTime() +org.joda.time.Instant#toMutableDateTime() +org.joda.time.Instant#toMutableDateTimeISO() +org.joda.time.base.AbstractInstant#toDateTimeISO() +org.joda.time.base.AbstractInstant#toDateTime() +org.joda.time.base.AbstractInstant#toMutableDateTime() +org.joda.time.base.AbstractInstant#toMutableDateTimeISO() +org.joda.time.LocalDateTime#() +org.joda.time.LocalDateTime#(long) +org.joda.time.LocalDateTime#(java.lang.Object) +org.joda.time.LocalDateTime#now() +org.joda.time.LocalDateTime#fromDateFields(java.util.Date) +org.joda.time.LocalDateTime#toDate() +org.joda.time.LocalDateTime#toDateTime() +org.joda.time.LocalDate#() +org.joda.time.LocalDate#(long) +org.joda.time.LocalDate#(java.lang.Object) +org.joda.time.LocalDate#fromDateFields(java.util.Date) +org.joda.time.LocalDate#now() +org.joda.time.LocalDate#toDate() +org.joda.time.LocalDate#toDateTime(org.joda.time.LocalTime) +org.joda.time.LocalDate#toDateTimeAtCurrentTime() +org.joda.time.LocalDate#toDateTimeAtStartOfDay() +org.joda.time.LocalDate#toInterval() +org.joda.time.LocalTime#() +org.joda.time.LocalTime#(long) +org.joda.time.LocalTime#(java.lang.Object) +org.joda.time.LocalTime#fromDateFields(java.util.Date) +org.joda.time.LocalTime#now() +org.joda.time.LocalTime#toDateTimeToday() +org.joda.time.Interval#(long, long) +org.joda.time.Interval#(java.lang.Object) +org.joda.time.Interval#parse(java.lang.String) +org.joda.time.Interval#parseWithOffset(java.lang.String) + +@defaultMessage Doesn't handle edge cases where the start of day isn't midnight. 
+org.joda.time.LocalDate#toDateTimeAtMidnight() +org.joda.time.DateMidnight \ No newline at end of file diff --git a/common/src/main/java/io/druid/audit/AuditEntry.java b/common/src/main/java/io/druid/audit/AuditEntry.java index c1c3bcaa114..4b1d5bacb3f 100644 --- a/common/src/main/java/io/druid/audit/AuditEntry.java +++ b/common/src/main/java/io/druid/audit/AuditEntry.java @@ -22,6 +22,7 @@ package io.druid.audit; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Preconditions; +import io.druid.java.util.common.DateTimes; import org.joda.time.DateTime; /** @@ -50,7 +51,7 @@ public class AuditEntry this.key = key; this.type = type; this.auditInfo = authorInfo; - this.auditTime = auditTime == null ? DateTime.now() : auditTime; + this.auditTime = auditTime == null ? DateTimes.nowUtc() : auditTime; this.payload = payload; } @@ -151,7 +152,7 @@ public class AuditEntry this.key = null; this.auditInfo = null; this.payload = null; - this.auditTime = DateTime.now(); + this.auditTime = DateTimes.nowUtc(); } public Builder key(String key) diff --git a/common/src/main/java/io/druid/math/expr/Function.java b/common/src/main/java/io/druid/math/expr/Function.java index df226372080..b3e6ac87e04 100644 --- a/common/src/main/java/io/druid/math/expr/Function.java +++ b/common/src/main/java/io/druid/math/expr/Function.java @@ -30,7 +30,9 @@ import org.joda.time.format.ISODateTimeFormat; import java.util.List; /** + * Do NOT remove "unused" members in this class. They are used by generated Antlr */ +@SuppressWarnings("unused") interface Function { String name(); @@ -1024,7 +1026,7 @@ interface Function } final String arg = args.get(0).eval(bindings).asString(); - return ExprEval.of(Strings.nullToEmpty(arg).toLowerCase()); + return ExprEval.of(StringUtils.toLowerCase(Strings.nullToEmpty(arg))); } } @@ -1044,7 +1046,7 @@ interface Function } final String arg = args.get(0).eval(bindings).asString(); - return ExprEval.of(Strings.nullToEmpty(arg).toUpperCase()); + return ExprEval.of(StringUtils.toUpperCase(Strings.nullToEmpty(arg))); } } } diff --git a/common/src/main/java/io/druid/timeline/VersionedIntervalTimeline.java b/common/src/main/java/io/druid/timeline/VersionedIntervalTimeline.java index 831438c22af..aa5a7e81e93 100644 --- a/common/src/main/java/io/druid/timeline/VersionedIntervalTimeline.java +++ b/common/src/main/java/io/druid/timeline/VersionedIntervalTimeline.java @@ -25,7 +25,7 @@ import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Ordering; import com.google.common.collect.Sets; -import io.druid.common.utils.JodaUtils; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.UOE; import io.druid.java.util.common.guava.Comparators; import io.druid.timeline.partition.ImmutablePartitionHolder; @@ -288,7 +288,7 @@ public class VersionedIntervalTimeline implements Timel } Interval lower = completePartitionsTimeline.floorKey( - new Interval(interval.getStartMillis(), JodaUtils.MAX_INSTANT) + new Interval(interval.getStart(), DateTimes.MAX) ); if (lower == null || !lower.overlaps(interval)) { diff --git a/common/src/test/java/io/druid/common/utils/JodaUtilsTest.java b/common/src/test/java/io/druid/common/utils/JodaUtilsTest.java index 5c4a7646b34..df5c3791afa 100644 --- a/common/src/test/java/io/druid/common/utils/JodaUtilsTest.java +++ b/common/src/test/java/io/druid/common/utils/JodaUtilsTest.java @@ -19,6 +19,8 @@ package 
io.druid.common.utils; +import io.druid.java.util.common.Intervals; +import io.druid.java.util.common.JodaUtils; import org.joda.time.Duration; import org.joda.time.Interval; import org.joda.time.Period; @@ -37,18 +39,18 @@ public class JodaUtilsTest public void testUmbrellaIntervalsSimple() throws Exception { List intervals = Arrays.asList( - new Interval("2011-03-03/2011-03-04"), - new Interval("2011-01-01/2011-01-02"), - new Interval("2011-02-01/2011-02-05"), - new Interval("2011-02-03/2011-02-08"), - new Interval("2011-01-01/2011-01-03"), - new Interval("2011-03-01/2011-03-02"), - new Interval("2011-03-05/2011-03-06"), - new Interval("2011-02-01/2011-02-02") + Intervals.of("2011-03-03/2011-03-04"), + Intervals.of("2011-01-01/2011-01-02"), + Intervals.of("2011-02-01/2011-02-05"), + Intervals.of("2011-02-03/2011-02-08"), + Intervals.of("2011-01-01/2011-01-03"), + Intervals.of("2011-03-01/2011-03-02"), + Intervals.of("2011-03-05/2011-03-06"), + Intervals.of("2011-02-01/2011-02-02") ); Assert.assertEquals( - new Interval("2011-01-01/2011-03-06"), + Intervals.of("2011-01-01/2011-03-06"), JodaUtils.umbrellaInterval(intervals) ); } @@ -71,23 +73,23 @@ public class JodaUtilsTest public void testCondenseIntervalsSimple() throws Exception { List intervals = Arrays.asList( - new Interval("2011-01-01/2011-01-02"), - new Interval("2011-01-02/2011-01-03"), - new Interval("2011-02-01/2011-02-05"), - new Interval("2011-02-01/2011-02-02"), - new Interval("2011-02-03/2011-02-08"), - new Interval("2011-03-01/2011-03-02"), - new Interval("2011-03-03/2011-03-04"), - new Interval("2011-03-05/2011-03-06") + Intervals.of("2011-01-01/2011-01-02"), + Intervals.of("2011-01-02/2011-01-03"), + Intervals.of("2011-02-01/2011-02-05"), + Intervals.of("2011-02-01/2011-02-02"), + Intervals.of("2011-02-03/2011-02-08"), + Intervals.of("2011-03-01/2011-03-02"), + Intervals.of("2011-03-03/2011-03-04"), + Intervals.of("2011-03-05/2011-03-06") ); Assert.assertEquals( Arrays.asList( - new Interval("2011-01-01/2011-01-03"), - new Interval("2011-02-01/2011-02-08"), - new Interval("2011-03-01/2011-03-02"), - new Interval("2011-03-03/2011-03-04"), - new Interval("2011-03-05/2011-03-06") + Intervals.of("2011-01-01/2011-01-03"), + Intervals.of("2011-02-01/2011-02-08"), + Intervals.of("2011-03-01/2011-03-02"), + Intervals.of("2011-03-03/2011-03-04"), + Intervals.of("2011-03-05/2011-03-06") ), JodaUtils.condenseIntervals(intervals) ); @@ -97,31 +99,31 @@ public class JodaUtilsTest public void testCondenseIntervalsMixedUp() throws Exception { List intervals = Arrays.asList( - new Interval("2011-01-01/2011-01-02"), - new Interval("2011-01-02/2011-01-03"), - new Interval("2011-02-01/2011-02-05"), - new Interval("2011-02-01/2011-02-02"), - new Interval("2011-02-03/2011-02-08"), - new Interval("2011-03-01/2011-03-02"), - new Interval("2011-03-03/2011-03-04"), - new Interval("2011-03-05/2011-03-06"), - new Interval("2011-04-01/2011-04-05"), - new Interval("2011-04-02/2011-04-03"), - new Interval("2011-05-01/2011-05-05"), - new Interval("2011-05-02/2011-05-07") + Intervals.of("2011-01-01/2011-01-02"), + Intervals.of("2011-01-02/2011-01-03"), + Intervals.of("2011-02-01/2011-02-05"), + Intervals.of("2011-02-01/2011-02-02"), + Intervals.of("2011-02-03/2011-02-08"), + Intervals.of("2011-03-01/2011-03-02"), + Intervals.of("2011-03-03/2011-03-04"), + Intervals.of("2011-03-05/2011-03-06"), + Intervals.of("2011-04-01/2011-04-05"), + Intervals.of("2011-04-02/2011-04-03"), + Intervals.of("2011-05-01/2011-05-05"), + 
Intervals.of("2011-05-02/2011-05-07") ); for (int i = 0; i < 20; ++i) { Collections.shuffle(intervals); Assert.assertEquals( Arrays.asList( - new Interval("2011-01-01/2011-01-03"), - new Interval("2011-02-01/2011-02-08"), - new Interval("2011-03-01/2011-03-02"), - new Interval("2011-03-03/2011-03-04"), - new Interval("2011-03-05/2011-03-06"), - new Interval("2011-04-01/2011-04-05"), - new Interval("2011-05-01/2011-05-07") + Intervals.of("2011-01-01/2011-01-03"), + Intervals.of("2011-02-01/2011-02-08"), + Intervals.of("2011-03-01/2011-03-02"), + Intervals.of("2011-03-03/2011-03-04"), + Intervals.of("2011-03-05/2011-03-06"), + Intervals.of("2011-04-01/2011-04-05"), + Intervals.of("2011-05-01/2011-05-07") ), JodaUtils.condenseIntervals(intervals) ); @@ -131,15 +133,13 @@ public class JodaUtilsTest @Test public void testMinMaxInterval() { - final Interval interval = new Interval(JodaUtils.MIN_INSTANT, JodaUtils.MAX_INSTANT); - Assert.assertEquals(Long.MAX_VALUE, interval.toDuration().getMillis()); + Assert.assertEquals(Long.MAX_VALUE, Intervals.ETERNITY.toDuration().getMillis()); } @Test public void testMinMaxDuration() { - final Interval interval = new Interval(JodaUtils.MIN_INSTANT, JodaUtils.MAX_INSTANT); - final Duration duration = interval.toDuration(); + final Duration duration = Intervals.ETERNITY.toDuration(); Assert.assertEquals(Long.MAX_VALUE, duration.getMillis()); Assert.assertEquals("PT9223372036854775.807S", duration.toString()); } @@ -148,8 +148,7 @@ public class JodaUtilsTest @Test(expected = ArithmeticException.class) public void testMinMaxPeriod() { - final Interval interval = new Interval(JodaUtils.MIN_INSTANT, JodaUtils.MAX_INSTANT); - final Period period = interval.toDuration().toPeriod(); + final Period period = Intervals.ETERNITY.toDuration().toPeriod(); Assert.assertEquals(Long.MAX_VALUE, period.getMinutes()); } diff --git a/common/src/test/java/io/druid/concurrent/ExecsTest.java b/common/src/test/java/io/druid/concurrent/ExecsTest.java index fdb75260737..714d3516366 100644 --- a/common/src/test/java/io/druid/concurrent/ExecsTest.java +++ b/common/src/test/java/io/druid/concurrent/ExecsTest.java @@ -76,7 +76,7 @@ public class ExecsTest { for (int i = 0; i < nTasks; i++) { final int taskID = i; - System.out.println("Produced task" + taskID); + log.info("Produced task %d", taskID); blockingExecutor.submit( new Runnable() { diff --git a/common/src/test/java/io/druid/concurrent/LifecycleLockTest.java b/common/src/test/java/io/druid/concurrent/LifecycleLockTest.java index d981aeb081e..e1d9f8adbbf 100644 --- a/common/src/test/java/io/druid/concurrent/LifecycleLockTest.java +++ b/common/src/test/java/io/druid/concurrent/LifecycleLockTest.java @@ -57,7 +57,7 @@ public class LifecycleLockTest finishLatch.countDown(); } catch (InterruptedException e) { - e.printStackTrace(); + throw new RuntimeException(e); } } }.start(); @@ -99,7 +99,7 @@ public class LifecycleLockTest finishLatch.countDown(); } catch (InterruptedException e) { - e.printStackTrace(); + throw new RuntimeException(e); } } }.start(); diff --git a/common/src/test/java/io/druid/timeline/VersionedIntervalTimelineTest.java b/common/src/test/java/io/druid/timeline/VersionedIntervalTimelineTest.java index 6f955fa2933..0eb158798f9 100644 --- a/common/src/test/java/io/druid/timeline/VersionedIntervalTimelineTest.java +++ b/common/src/test/java/io/druid/timeline/VersionedIntervalTimelineTest.java @@ -25,8 +25,8 @@ import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import 
com.google.common.collect.Ordering; import com.google.common.collect.Sets; - -import io.druid.common.utils.JodaUtils; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.Pair; import io.druid.timeline.partition.ImmutablePartitionHolder; import io.druid.timeline.partition.IntegerPartitionChunk; @@ -86,7 +86,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-04-02/2011-04-06", "2", 1), createExpected("2011-04-06/2011-04-09", "3", 4) ), - timeline.lookup(new Interval("2011-04-01/2011-04-09")) + timeline.lookup(Intervals.of("2011-04-01/2011-04-09")) ); } @@ -95,7 +95,7 @@ public class VersionedIntervalTimelineTest { Assert.assertEquals( makeSingle(1), - timeline.remove(new Interval("2011-04-01/2011-04-09"), "2", makeSingle(1)) + timeline.remove(Intervals.of("2011-04-01/2011-04-09"), "2", makeSingle(1)) ); assertValues( Arrays.asList( @@ -104,7 +104,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-04-03/2011-04-06", "1", 3), createExpected("2011-04-06/2011-04-09", "3", 4) ), - timeline.lookup(new Interval("2011-04-01/2011-04-09")) + timeline.lookup(Intervals.of("2011-04-01/2011-04-09")) ); } @@ -113,11 +113,11 @@ public class VersionedIntervalTimelineTest { Assert.assertEquals( makeSingle(1), - timeline.remove(new Interval("2011-04-01/2011-04-09"), "2", makeSingle(1)) + timeline.remove(Intervals.of("2011-04-01/2011-04-09"), "2", makeSingle(1)) ); Assert.assertEquals( makeSingle(2), - timeline.remove(new Interval("2011-04-01/2011-04-03"), "1", makeSingle(2)) + timeline.remove(Intervals.of("2011-04-01/2011-04-03"), "1", makeSingle(2)) ); assertValues( Arrays.asList( @@ -125,7 +125,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-04-03/2011-04-06", "1", 3), createExpected("2011-04-06/2011-04-09", "3", 4) ), - timeline.lookup(new Interval("2011-04-01/2011-04-09")) + timeline.lookup(Intervals.of("2011-04-01/2011-04-09")) ); } @@ -134,7 +134,7 @@ public class VersionedIntervalTimelineTest { Assert.assertEquals( makeSingle(1), - timeline.remove(new Interval("2011-04-01/2011-04-09"), "2", makeSingle(1)) + timeline.remove(Intervals.of("2011-04-01/2011-04-09"), "2", makeSingle(1)) ); assertValues( Arrays.asList( @@ -142,7 +142,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-04-02/2011-04-03", "1", 2), createExpected("2011-04-03/2011-04-05", "1", 3) ), - timeline.lookup(new Interval("2011-04-01/2011-04-05")) + timeline.lookup(Intervals.of("2011-04-01/2011-04-05")) ); assertValues( @@ -150,7 +150,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-04-02T18/2011-04-03", "1", 2), createExpected("2011-04-03/2011-04-04T01", "1", 3) ), - timeline.lookup(new Interval("2011-04-02T18/2011-04-04T01")) + timeline.lookup(Intervals.of("2011-04-02T18/2011-04-04T01")) ); } @@ -161,21 +161,21 @@ public class VersionedIntervalTimelineTest Collections.singletonList( createExpected("2011-05-01/2011-05-09", "4", 9) ), - timeline.lookup(new Interval("2011-05-01/2011-05-09")) + timeline.lookup(Intervals.of("2011-05-01/2011-05-09")) ); } @Test public void testMay2() throws Exception { - Assert.assertNotNull(timeline.remove(new Interval("2011-05-01/2011-05-10"), "4", makeSingle(1))); + Assert.assertNotNull(timeline.remove(Intervals.of("2011-05-01/2011-05-10"), "4", makeSingle(1))); assertValues( Arrays.asList( createExpected("2011-05-01/2011-05-03", "2", 7), createExpected("2011-05-03/2011-05-04", "3", 8), createExpected("2011-05-04/2011-05-05", "2", 7) ), - 
timeline.lookup(new Interval("2011-05-01/2011-05-09")) + timeline.lookup(Intervals.of("2011-05-01/2011-05-09")) ); } @@ -184,25 +184,25 @@ public class VersionedIntervalTimelineTest { Assert.assertEquals( makeSingle(9), - timeline.remove(new Interval("2011-05-01/2011-05-10"), "4", makeSingle(9)) + timeline.remove(Intervals.of("2011-05-01/2011-05-10"), "4", makeSingle(9)) ); Assert.assertEquals( makeSingle(7), - timeline.remove(new Interval("2011-05-01/2011-05-05"), "2", makeSingle(7)) + timeline.remove(Intervals.of("2011-05-01/2011-05-05"), "2", makeSingle(7)) ); assertValues( Arrays.asList( createExpected("2011-05-01/2011-05-02", "1", 6), createExpected("2011-05-03/2011-05-04", "3", 8) ), - timeline.lookup(new Interval("2011-05-01/2011-05-09")) + timeline.lookup(Intervals.of("2011-05-01/2011-05-09")) ); } @Test public void testInsertInWrongOrder() throws Exception { - DateTime overallStart = new DateTime().minus(Hours.TWO); + DateTime overallStart = DateTimes.nowUtc().minus(Hours.TWO); Assert.assertTrue( "These timestamps have to be at the end AND include now for this test to work.", @@ -241,32 +241,32 @@ public class VersionedIntervalTimelineTest { Assert.assertEquals( new ImmutablePartitionHolder(new PartitionHolder(makeSingle(1))), - timeline.findEntry(new Interval("2011-10-01/2011-10-02"), "1") + timeline.findEntry(Intervals.of("2011-10-01/2011-10-02"), "1") ); Assert.assertEquals( new ImmutablePartitionHolder(new PartitionHolder(makeSingle(1))), - timeline.findEntry(new Interval("2011-10-01/2011-10-01T10"), "1") + timeline.findEntry(Intervals.of("2011-10-01/2011-10-01T10"), "1") ); Assert.assertEquals( new ImmutablePartitionHolder(new PartitionHolder(makeSingle(1))), - timeline.findEntry(new Interval("2011-10-01T02/2011-10-02"), "1") + timeline.findEntry(Intervals.of("2011-10-01T02/2011-10-02"), "1") ); Assert.assertEquals( new ImmutablePartitionHolder(new PartitionHolder(makeSingle(1))), - timeline.findEntry(new Interval("2011-10-01T04/2011-10-01T17"), "1") + timeline.findEntry(Intervals.of("2011-10-01T04/2011-10-01T17"), "1") ); Assert.assertEquals( null, - timeline.findEntry(new Interval("2011-10-01T04/2011-10-01T17"), "2") + timeline.findEntry(Intervals.of("2011-10-01T04/2011-10-01T17"), "2") ); Assert.assertEquals( null, - timeline.findEntry(new Interval("2011-10-01T04/2011-10-02T17"), "1") + timeline.findEntry(Intervals.of("2011-10-01T04/2011-10-02T17"), "1") ); } @@ -280,7 +280,7 @@ public class VersionedIntervalTimelineTest Assert.assertEquals( new ImmutablePartitionHolder(new PartitionHolder(makeSingle(1))), - timeline.findEntry(new Interval("2011-01-02T02/2011-01-04"), "1") + timeline.findEntry(Intervals.of("2011-01-02T02/2011-01-04"), "1") ); } @@ -301,7 +301,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-10-04/2011-10-05", "4", 4), createExpected("2011-10-05/2011-10-06", "5", 5) ), - timeline.lookup(new Interval("2011-10-01/2011-10-06")) + timeline.lookup(Intervals.of("2011-10-01/2011-10-06")) ); } @@ -313,14 +313,14 @@ public class VersionedIntervalTimelineTest add("2011-10-06/2011-10-07", "6", IntegerPartitionChunk.make(null, 10, 0, 60)); assertValues( ImmutableList.of(createExpected("2011-10-05/2011-10-06", "5", 5)), - timeline.lookup(new Interval("2011-10-05/2011-10-07")) + timeline.lookup(Intervals.of("2011-10-05/2011-10-07")) ); Assert.assertTrue("Expected no overshadowed entries", timeline.findOvershadowed().isEmpty()); add("2011-10-06/2011-10-07", "6", IntegerPartitionChunk.make(10, 20, 1, 61)); assertValues( 
ImmutableList.of(createExpected("2011-10-05/2011-10-06", "5", 5)), - timeline.lookup(new Interval("2011-10-05/2011-10-07")) + timeline.lookup(Intervals.of("2011-10-05/2011-10-07")) ); Assert.assertTrue("Expected no overshadowed entries", timeline.findOvershadowed().isEmpty()); @@ -337,7 +337,7 @@ public class VersionedIntervalTimelineTest ) ) ), - timeline.lookup(new Interval("2011-10-05/2011-10-07")) + timeline.lookup(Intervals.of("2011-10-05/2011-10-07")) ); Assert.assertTrue("Expected no overshadowed entries", timeline.findOvershadowed().isEmpty()); } @@ -368,10 +368,10 @@ public class VersionedIntervalTimelineTest testIncompletePartitionDoesNotOvershadow(); final IntegerPartitionChunk chunk = IntegerPartitionChunk.make(null, 10, 0, 60); - Assert.assertEquals(chunk, timeline.remove(new Interval("2011-10-05/2011-10-07"), "6", chunk)); + Assert.assertEquals(chunk, timeline.remove(Intervals.of("2011-10-05/2011-10-07"), "6", chunk)); assertValues( ImmutableList.of(createExpected("2011-10-05/2011-10-06", "5", 5)), - timeline.lookup(new Interval("2011-10-05/2011-10-07")) + timeline.lookup(Intervals.of("2011-10-05/2011-10-07")) ); Assert.assertTrue("Expected no overshadowed entries", timeline.findOvershadowed().isEmpty()); } @@ -384,18 +384,18 @@ public class VersionedIntervalTimelineTest Collections.singletonList( createExpected("2011-05-01/2011-05-09", "5", 10) ), - timeline.lookup(new Interval("2011-05-01/2011-05-09")) + timeline.lookup(Intervals.of("2011-05-01/2011-05-09")) ); Assert.assertEquals( makeSingle(10), - timeline.remove(new Interval("2011-05-01/2011-05-10"), "5", makeSingle(10)) + timeline.remove(Intervals.of("2011-05-01/2011-05-10"), "5", makeSingle(10)) ); assertValues( Collections.singletonList( createExpected("2011-05-01/2011-05-09", "4", 9) ), - timeline.lookup(new Interval("2011-05-01/2011-05-09")) + timeline.lookup(Intervals.of("2011-05-01/2011-05-09")) ); add("2011-05-01/2011-05-10", "5", 10); @@ -403,18 +403,18 @@ public class VersionedIntervalTimelineTest Collections.singletonList( createExpected("2011-05-01/2011-05-09", "5", 10) ), - timeline.lookup(new Interval("2011-05-01/2011-05-09")) + timeline.lookup(Intervals.of("2011-05-01/2011-05-09")) ); Assert.assertEquals( makeSingle(9), - timeline.remove(new Interval("2011-05-01/2011-05-10"), "4", makeSingle(9)) + timeline.remove(Intervals.of("2011-05-01/2011-05-10"), "4", makeSingle(9)) ); assertValues( Collections.singletonList( createExpected("2011-05-01/2011-05-09", "5", 10) ), - timeline.lookup(new Interval("2011-05-01/2011-05-09")) + timeline.lookup(Intervals.of("2011-05-01/2011-05-09")) ); } @@ -454,7 +454,7 @@ public class VersionedIntervalTimelineTest Collections.singletonList( createExpected("2011-01-01/2011-01-10", "2", 2) ), - timeline.lookup(new Interval("2011-01-01/2011-01-10")) + timeline.lookup(Intervals.of("2011-01-01/2011-01-10")) ); } @@ -475,7 +475,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-01-05/2011-01-15", "2", 3), createExpected("2011-01-15/2011-01-20", "1", 2) ), - timeline.lookup(new Interval("2011-01-01/2011-01-20")) + timeline.lookup(Intervals.of("2011-01-01/2011-01-20")) ); } @@ -496,7 +496,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-01-05/2011-01-15", "2", 3), createExpected("2011-01-15/2011-01-20", "1", 2) ), - timeline.lookup(new Interval("2011-01-01/2011-01-20")) + timeline.lookup(Intervals.of("2011-01-01/2011-01-20")) ); } @@ -515,7 +515,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-01-01/2011-01-05", "1", 1), 
createExpected("2011-01-05/2011-01-15", "2", 3) ), - timeline.lookup(new Interval("2011-01-01/2011-01-15")) + timeline.lookup(Intervals.of("2011-01-01/2011-01-15")) ); } @@ -534,7 +534,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-01-01/2011-01-05", "1", 1), createExpected("2011-01-05/2011-01-15", "2", 3) ), - timeline.lookup(new Interval("2011-01-01/2011-01-15")) + timeline.lookup(Intervals.of("2011-01-01/2011-01-15")) ); } @@ -553,7 +553,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-01-01/2011-01-10", "2", 1), createExpected("2011-01-10/2011-01-15", "1", 3) ), - timeline.lookup(new Interval("2011-01-01/2011-01-15")) + timeline.lookup(Intervals.of("2011-01-01/2011-01-15")) ); } @@ -572,7 +572,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-01-01/2011-01-10", "2", 1), createExpected("2011-01-10/2011-01-15", "1", 3) ), - timeline.lookup(new Interval("2011-01-01/2011-01-15")) + timeline.lookup(Intervals.of("2011-01-01/2011-01-15")) ); } @@ -592,7 +592,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-01-05/2011-01-15", "2", 3), createExpected("2011-01-15/2011-01-20", "1", 2) ), - timeline.lookup(new Interval("2011-01-01/2011-01-20")) + timeline.lookup(Intervals.of("2011-01-01/2011-01-20")) ); } @@ -612,7 +612,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-01-05/2011-01-15", "2", 3), createExpected("2011-01-15/2011-01-20", "1", 2) ), - timeline.lookup(new Interval("2011-01-01/2011-01-20")) + timeline.lookup(Intervals.of("2011-01-01/2011-01-20")) ); } @@ -632,7 +632,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-01-01/2011-01-15", "2", 3), createExpected("2011-01-15/2011-01-20", "1", 2) ), - timeline.lookup(new Interval("2011-01-01/2011-01-20")) + timeline.lookup(Intervals.of("2011-01-01/2011-01-20")) ); } @@ -652,7 +652,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-01-01/2011-01-15", "2", 3), createExpected("2011-01-15/2011-01-20", "1", 2) ), - timeline.lookup(new Interval("2011-01-01/2011-01-20")) + timeline.lookup(Intervals.of("2011-01-01/2011-01-20")) ); } @@ -674,7 +674,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-01-03/2011-01-06", "3", 3), createExpected("2011-01-06/2011-01-20", "2", 2) ), - timeline.lookup(new Interval("2011-01-01/2011-01-20")) + timeline.lookup(Intervals.of("2011-01-01/2011-01-20")) ); } @@ -696,7 +696,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-01-03/2011-01-06", "3", 3), createExpected("2011-01-06/2011-01-20", "2", 2) ), - timeline.lookup(new Interval("2011-01-01/2011-01-20")) + timeline.lookup(Intervals.of("2011-01-01/2011-01-20")) ); } @@ -718,7 +718,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-01-03/2011-01-06", "3", 3), createExpected("2011-01-06/2011-01-20", "2", 2) ), - timeline.lookup(new Interval("2011-01-01/2011-01-20")) + timeline.lookup(Intervals.of("2011-01-01/2011-01-20")) ); } @@ -741,7 +741,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-01-06/2011-01-10", "2", 2), createExpected("2011-01-10/2011-01-20", "1", 1) ), - timeline.lookup(new Interval("2011-01-01/2011-01-20")) + timeline.lookup(Intervals.of("2011-01-01/2011-01-20")) ); } @@ -764,7 +764,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-01-06/2011-01-10", "2", 2), createExpected("2011-01-10/2011-01-20", "1", 1) ), - timeline.lookup(new Interval("2011-01-01/2011-01-20")) + timeline.lookup(Intervals.of("2011-01-01/2011-01-20")) ); } 
@@ -787,7 +787,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-01-06/2011-01-10", "2", 2), createExpected("2011-01-10/2011-01-20", "1", 1) ), - timeline.lookup(new Interval("2011-01-01/2011-01-20")) + timeline.lookup(Intervals.of("2011-01-01/2011-01-20")) ); } @@ -811,7 +811,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-01-13/2011-01-15", "1", 2), createExpected("2011-01-15/2011-01-20", "1", 3) ), - timeline.lookup(new Interval("2011-01-01/2011-01-20")) + timeline.lookup(Intervals.of("2011-01-01/2011-01-20")) ); } @@ -835,7 +835,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-01-13/2011-01-15", "1", 2), createExpected("2011-01-15/2011-01-20", "1", 3) ), - timeline.lookup(new Interval("2011-01-01/2011-01-20")) + timeline.lookup(Intervals.of("2011-01-01/2011-01-20")) ); } @@ -855,7 +855,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-01-01/2011-01-10", "2", 2), createExpected("2011-01-10/2011-01-20", "2", 3) ), - timeline.lookup(new Interval("2011-01-01/2011-01-20")) + timeline.lookup(Intervals.of("2011-01-01/2011-01-20")) ); } @@ -875,7 +875,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-01-01/2011-01-10", "2", 2), createExpected("2011-01-10/2011-01-20", "2", 3) ), - timeline.lookup(new Interval("2011-01-01/2011-01-20")) + timeline.lookup(Intervals.of("2011-01-01/2011-01-20")) ); } @@ -897,7 +897,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-01-05/2011-01-15", "2", 2), createExpected("2011-01-15/2011-01-25", "3", 3) ), - timeline.lookup(new Interval("2011-01-01/2011-01-25")) + timeline.lookup(Intervals.of("2011-01-01/2011-01-25")) ); } @@ -919,7 +919,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-01-05/2011-01-15", "2", 2), createExpected("2011-01-15/2011-01-25", "3", 3) ), - timeline.lookup(new Interval("2011-01-01/2011-01-25")) + timeline.lookup(Intervals.of("2011-01-01/2011-01-25")) ); } @@ -941,7 +941,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-01-05/2011-01-13", "1", 1), createExpected("2011-01-13/2011-01-20", "2", 2) ), - timeline.lookup(new Interval("2011-01-01/2011-01-20")) + timeline.lookup(Intervals.of("2011-01-01/2011-01-20")) ); } @@ -961,7 +961,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-01-01/2011-01-15", "2", 2), createExpected("2011-01-15/2011-01-20", "1", 1) ), - timeline.lookup(new Interval("2011-01-01/2011-01-20")) + timeline.lookup(Intervals.of("2011-01-01/2011-01-20")) ); } @@ -983,7 +983,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-01-13/2011-01-17", "2", 3), createExpected("2011-01-17/2011-01-20", "1", 1) ), - timeline.lookup(new Interval("2011-01-01/2011-01-20")) + timeline.lookup(Intervals.of("2011-01-01/2011-01-20")) ); } @@ -1005,7 +1005,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-01-13/2011-01-17", "2", 3), createExpected("2011-01-17/2011-01-20", "1", 1) ), - timeline.lookup(new Interval("2011-01-01/2011-01-20")) + timeline.lookup(Intervals.of("2011-01-01/2011-01-20")) ); } @@ -1026,7 +1026,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-01-05/2011-01-15", "1", 1), createExpected("2011-01-15/2011-01-20", "2", 3) ), - timeline.lookup(new Interval("2011-01-01/2011-01-20")) + timeline.lookup(Intervals.of("2011-01-01/2011-01-20")) ); } @@ -1047,7 +1047,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-01-05/2011-01-15", "1", 1), createExpected("2011-01-15/2011-01-20", "2", 3) ), - 
timeline.lookup(new Interval("2011-01-01/2011-01-20")) + timeline.lookup(Intervals.of("2011-01-01/2011-01-20")) ); } @@ -1072,7 +1072,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-01-04/2011-01-05", "3", 3), createExpected("2011-01-05/2011-01-06", "4", 4) ), - timeline.lookup(new Interval("0000-01-01/3000-01-01")) + timeline.lookup(Intervals.of("0000-01-01/3000-01-01")) ); } @@ -1093,7 +1093,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-01-01T12/2011-01-02", "3", 3), createExpected("2011-01-02/3011-01-03", "1", 1) ), - timeline.lookup(new Interval("2011-01-01/3011-01-03")) + timeline.lookup(Intervals.of("2011-01-01/3011-01-03")) ); } @@ -1175,13 +1175,13 @@ public class VersionedIntervalTimelineTest add("2011-01-01/2011-01-20", "1", 1); add("2011-01-10/2011-01-15", "2", 2); - timeline.remove(new Interval("2011-01-10/2011-01-15"), "2", makeSingle(2)); + timeline.remove(Intervals.of("2011-01-10/2011-01-15"), "2", makeSingle(2)); assertValues( Collections.singletonList( createExpected("2011-01-01/2011-01-20", "1", 1) ), - timeline.lookup(new Interval("2011-01-01/2011-01-20")) + timeline.lookup(Intervals.of("2011-01-01/2011-01-20")) ); } @@ -1194,7 +1194,7 @@ public class VersionedIntervalTimelineTest add("2011-01-10/2011-01-20", "2", 2); add("2011-01-20/2011-01-30", "3", 4); - timeline.remove(new Interval("2011-01-10/2011-01-20"), "2", makeSingle(2)); + timeline.remove(Intervals.of("2011-01-10/2011-01-20"), "2", makeSingle(2)); assertValues( Arrays.asList( @@ -1202,7 +1202,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-01-20/2011-01-30", "3", 4) ), - timeline.lookup(new Interval("2011-01-01/2011-01-30")) + timeline.lookup(Intervals.of("2011-01-01/2011-01-30")) ); } @@ -1215,15 +1215,15 @@ public class VersionedIntervalTimelineTest add("2011-01-02/2011-01-03", "2", 2); add("2011-01-10/2011-01-14", "2", 3); - timeline.remove(new Interval("2011-01-02/2011-01-03"), "2", makeSingle(2)); - timeline.remove(new Interval("2011-01-10/2011-01-14"), "2", makeSingle(3)); + timeline.remove(Intervals.of("2011-01-02/2011-01-03"), "2", makeSingle(2)); + timeline.remove(Intervals.of("2011-01-10/2011-01-14"), "2", makeSingle(3)); assertValues( Collections.singletonList( createExpected("2011-01-01/2011-01-20", "1", 1) ), - timeline.lookup(new Interval("2011-01-01/2011-01-20")) + timeline.lookup(Intervals.of("2011-01-01/2011-01-20")) ); } @@ -1236,7 +1236,7 @@ public class VersionedIntervalTimelineTest add("2011-01-10/2011-01-15", "2", 2); add("2011-01-15/2011-01-20", "2", 3); - timeline.remove(new Interval("2011-01-15/2011-01-20"), "2", makeSingle(3)); + timeline.remove(Intervals.of("2011-01-15/2011-01-20"), "2", makeSingle(3)); assertValues( Arrays.asList( @@ -1244,7 +1244,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-01-10/2011-01-15", "2", 2), createExpected("2011-01-15/2011-01-20", "1", 1) ), - timeline.lookup(new Interval("2011-01-01/2011-01-20")) + timeline.lookup(Intervals.of("2011-01-01/2011-01-20")) ); } @@ -1255,14 +1255,14 @@ public class VersionedIntervalTimelineTest add("2011-01-01/2011-01-20", "1", 1); add("2011-01-10/2011-01-15", "2", 2); - timeline.remove(new Interval("2011-01-10/2011-01-15"), "2", makeSingle(2)); + timeline.remove(Intervals.of("2011-01-10/2011-01-15"), "2", makeSingle(2)); add("2011-01-01/2011-01-20", "1", 1); assertValues( Collections.singletonList( createExpected("2011-01-01/2011-01-20", "1", 1) ), - timeline.lookup(new Interval("2011-01-01/2011-01-20")) + 
timeline.lookup(Intervals.of("2011-01-01/2011-01-20")) ); } @@ -1271,11 +1271,11 @@ public class VersionedIntervalTimelineTest { Assert.assertNull( "Don't have it, should be null", - timeline.remove(new Interval("1970-01-01/2025-04-20"), "1", makeSingle(1)) + timeline.remove(Intervals.of("1970-01-01/2025-04-20"), "1", makeSingle(1)) ); Assert.assertNull( "Don't have it, should be null", - timeline.remove(new Interval("2011-04-01/2011-04-09"), "version does not exist", makeSingle(1)) + timeline.remove(Intervals.of("2011-04-01/2011-04-09"), "version does not exist", makeSingle(1)) ); } @@ -1289,7 +1289,7 @@ public class VersionedIntervalTimelineTest add("2011-01-10/2011-01-15", "3", 3); add("2011-01-15/2011-01-20", "4", 4); - timeline.remove(new Interval("2011-01-15/2011-01-20"), "4", makeSingle(4)); + timeline.remove(Intervals.of("2011-01-15/2011-01-20"), "4", makeSingle(4)); assertValues( Arrays.asList( @@ -1297,7 +1297,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-01-05/2011-01-10", "2", 2), createExpected("2011-01-10/2011-01-15", "3", 3) ), - timeline.lookup(new Interval(new DateTime(0), new DateTime(JodaUtils.MAX_INSTANT))) + timeline.lookup(new Interval(DateTimes.EPOCH, DateTimes.MAX)) ); } @@ -1504,7 +1504,7 @@ public class VersionedIntervalTimelineTest add("2011-04-01/2011-04-09", "1", 1); - Assert.assertTrue(timeline.lookup(Interval.parse("1970/1980")).isEmpty()); + Assert.assertTrue(timeline.lookup(Intervals.of("1970/1980")).isEmpty()); } // https://github.com/druid-io/druid/issues/3010 @@ -1526,7 +1526,7 @@ public class VersionedIntervalTimelineTest ) ) ), - timeline.lookup(new Interval("2011-04-01/2011-04-02")) + timeline.lookup(Intervals.of("2011-04-01/2011-04-02")) ); add("2011-04-01/2011-04-02", "3", IntegerPartitionChunk.make(null, 1, 0, 110)); @@ -1540,7 +1540,7 @@ public class VersionedIntervalTimelineTest ) ) ), - timeline.lookup(new Interval("2011-04-01/2011-04-02")) + timeline.lookup(Intervals.of("2011-04-01/2011-04-02")) ); assertValues( Sets.newHashSet( @@ -1564,7 +1564,7 @@ public class VersionedIntervalTimelineTest ) ) ), - timeline.lookup(new Interval("2011-04-01/2011-04-02")) + timeline.lookup(Intervals.of("2011-04-01/2011-04-02")) ); } @@ -1579,58 +1579,58 @@ public class VersionedIntervalTimelineTest add("2011-04-15/2011-04-17", "1", new SingleElementPartitionChunk(1)); add("2011-04-17/2011-04-19", "1", new SingleElementPartitionChunk(1)); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-01/2011-04-03"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-01/2011-04-05"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-01/2011-04-06"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-01/2011-04-07"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-01/2011-04-08"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-01/2011-04-09"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-01/2011-04-10"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-01/2011-04-30"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-01/2011-04-03"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-01/2011-04-05"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-01/2011-04-06"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-01/2011-04-07"), "0")); + 
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-01/2011-04-08"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-01/2011-04-09"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-01/2011-04-10"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-01/2011-04-30"), "0")); - Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-06"), "0")); - Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-07"), "0")); - Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-08"), "0")); - Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-09"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-06"), "1")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-07"), "1")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-08"), "1")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-09"), "1")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-06"), "2")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-07"), "2")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-08"), "2")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-09"), "2")); + Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-06"), "0")); + Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-07"), "0")); + Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-08"), "0")); + Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-09"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-06"), "1")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-07"), "1")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-08"), "1")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-09"), "1")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-06"), "2")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-07"), "2")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-08"), "2")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-09"), "2")); - Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-06/2011-04-07"), "0")); - Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-06/2011-04-08"), "0")); - Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-06/2011-04-09"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-06/2011-04-10"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-06/2011-04-30"), "0")); + Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-06/2011-04-07"), "0")); + Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-06/2011-04-08"), "0")); + Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-06/2011-04-09"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-06/2011-04-10"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-06/2011-04-30"), "0")); - Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-07/2011-04-08"), "0")); - 
Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-07/2011-04-09"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-07/2011-04-10"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-07/2011-04-30"), "0")); + Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-07/2011-04-08"), "0")); + Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-07/2011-04-09"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-07/2011-04-10"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-07/2011-04-30"), "0")); - Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-08/2011-04-09"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-08/2011-04-10"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-08/2011-04-30"), "0")); + Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-08/2011-04-09"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-08/2011-04-10"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-08/2011-04-30"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-09/2011-04-10"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-09/2011-04-15"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-09/2011-04-17"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-09/2011-04-19"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-09/2011-04-30"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-09/2011-04-10"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-09/2011-04-15"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-09/2011-04-17"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-09/2011-04-19"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-09/2011-04-30"), "0")); - Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-15/2011-04-16"), "0")); - Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-15/2011-04-17"), "0")); - Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-15/2011-04-18"), "0")); - Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-15/2011-04-19"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-15/2011-04-20"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-15/2011-04-30"), "0")); + Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-15/2011-04-16"), "0")); + Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-15/2011-04-17"), "0")); + Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-15/2011-04-18"), "0")); + Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-15/2011-04-19"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-15/2011-04-20"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-15/2011-04-30"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-19/2011-04-20"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-21/2011-04-22"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-19/2011-04-20"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-21/2011-04-22"), "0")); } @Test @@ -1645,77 
+1645,77 @@ public class VersionedIntervalTimelineTest add("2011-04-17/2011-04-21", "11", new SingleElementPartitionChunk(1)); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-01/2011-04-03"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-01/2011-04-05"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-01/2011-04-06"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-01/2011-04-07"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-01/2011-04-08"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-01/2011-04-09"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-01/2011-04-10"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-01/2011-04-11"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-01/2011-04-30"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-01/2011-04-03"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-01/2011-04-05"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-01/2011-04-06"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-01/2011-04-07"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-01/2011-04-08"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-01/2011-04-09"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-01/2011-04-10"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-01/2011-04-11"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-01/2011-04-30"), "0")); - Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-06"), "0")); - Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-07"), "0")); - Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-08"), "0")); - Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-09"), "0")); - Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-10"), "0")); - Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-11"), "0")); + Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-06"), "0")); + Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-07"), "0")); + Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-08"), "0")); + Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-09"), "0")); + Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-10"), "0")); + Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-11"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-06"), "12")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-07"), "12")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-08"), "12")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-09"), "12")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-10"), "12")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-11"), "12")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-06"), "12")); + 
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-07"), "12")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-08"), "12")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-09"), "12")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-10"), "12")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-11"), "12")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-06"), "13")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-07"), "13")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-08"), "13")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-09"), "13")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-10"), "13")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-11"), "13")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-06"), "13")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-07"), "13")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-08"), "13")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-09"), "13")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-10"), "13")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-11"), "13")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-12"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-15"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-16"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-12"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-15"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-16"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-17"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-18"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-19"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-20"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-21"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-22"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-17"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-18"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-19"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-20"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-21"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-22"), "0")); - Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-06/2011-04-07"), "0")); - Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-06/2011-04-08"), "0")); - Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-06/2011-04-09"), "0")); - Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-06/2011-04-10"), 
"0")); - Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-06/2011-04-11"), "0")); + Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-06/2011-04-07"), "0")); + Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-06/2011-04-08"), "0")); + Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-06/2011-04-09"), "0")); + Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-06/2011-04-10"), "0")); + Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-06/2011-04-11"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-06/2011-04-12"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-06/2011-04-15"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-06/2011-04-16"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-06/2011-04-12"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-06/2011-04-15"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-06/2011-04-16"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-06/2011-04-17"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-06/2011-04-18"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-06/2011-04-19"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-06/2011-04-20"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-06/2011-04-21"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-06/2011-04-22"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-06/2011-04-17"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-06/2011-04-18"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-06/2011-04-19"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-06/2011-04-20"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-06/2011-04-21"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-06/2011-04-22"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-12/2011-04-15"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-12/2011-04-16"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-12/2011-04-15"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-12/2011-04-16"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-12/2011-04-17"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-12/2011-04-18"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-12/2011-04-19"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-12/2011-04-20"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-12/2011-04-21"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-12/2011-04-22"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-12/2011-04-17"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-12/2011-04-18"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-12/2011-04-19"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-12/2011-04-20"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-12/2011-04-21"), "0")); + 
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-12/2011-04-22"), "0")); - Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-15/2011-04-21"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-21/2011-04-22"), "0")); + Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-15/2011-04-21"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-21/2011-04-22"), "0")); } private Pair>> createExpected( @@ -1738,7 +1738,7 @@ public class VersionedIntervalTimelineTest ) { return Pair.of( - new Interval(intervalString), + Intervals.of(intervalString), Pair.of(version, new PartitionHolder(values)) ); } @@ -1750,17 +1750,17 @@ public class VersionedIntervalTimelineTest private void add(String interval, String version, Integer value) { - add(new Interval(interval), version, value); + add(Intervals.of(interval), version, value); } private void add(Interval interval, String version, Integer value) { - add(new Interval(interval), version, makeSingle(value)); + add(interval, version, makeSingle(value)); } private void add(String interval, String version, PartitionChunk value) { - add(new Interval(interval), version, value); + add(Intervals.of(interval), version, value); } private void add(Interval interval, String version, PartitionChunk value) diff --git a/extensions-contrib/ambari-metrics-emitter/src/test/java/io/druid/emitter/ambari/metrics/WhiteListBasedDruidToTimelineEventConverterTest.java b/extensions-contrib/ambari-metrics-emitter/src/test/java/io/druid/emitter/ambari/metrics/WhiteListBasedDruidToTimelineEventConverterTest.java index 06e97ddea4d..eeb4048a9b4 100644 --- a/extensions-contrib/ambari-metrics-emitter/src/test/java/io/druid/emitter/ambari/metrics/WhiteListBasedDruidToTimelineEventConverterTest.java +++ b/extensions-contrib/ambari-metrics-emitter/src/test/java/io/druid/emitter/ambari/metrics/WhiteListBasedDruidToTimelineEventConverterTest.java @@ -22,6 +22,7 @@ package io.druid.emitter.ambari.metrics; import com.google.common.collect.Maps; import com.metamx.emitter.service.ServiceMetricEvent; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; import junitparams.JUnitParamsRunner; import junitparams.Parameters; import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric; @@ -44,7 +45,7 @@ public class WhiteListBasedDruidToTimelineEventConverterTest new DefaultObjectMapper() ); private ServiceMetricEvent event; - private final DateTime createdTime = new DateTime(); + private final DateTime createdTime = DateTimes.nowUtc(); private final String hostname = "testHost:8080"; private final String serviceName = "historical"; private final String defaultNamespace = prefix + "." 
+ serviceName; diff --git a/extensions-contrib/azure-extensions/src/test/java/io/druid/storage/azure/AzureDataSegmentKillerTest.java b/extensions-contrib/azure-extensions/src/test/java/io/druid/storage/azure/AzureDataSegmentKillerTest.java index 17428a545f6..3003103449e 100644 --- a/extensions-contrib/azure-extensions/src/test/java/io/druid/storage/azure/AzureDataSegmentKillerTest.java +++ b/extensions-contrib/azure-extensions/src/test/java/io/druid/storage/azure/AzureDataSegmentKillerTest.java @@ -21,11 +21,11 @@ package io.druid.storage.azure; import com.google.common.collect.ImmutableMap; import com.microsoft.azure.storage.StorageException; +import io.druid.java.util.common.Intervals; import io.druid.segment.loading.SegmentLoadingException; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; import org.easymock.EasyMockSupport; -import org.joda.time.Interval; import org.junit.Before; import org.junit.Test; @@ -44,7 +44,7 @@ public class AzureDataSegmentKillerTest extends EasyMockSupport private static final DataSegment dataSegment = new DataSegment( "test", - new Interval("2015-04-12/2015-04-13"), + Intervals.of("2015-04-12/2015-04-13"), "1", ImmutableMap.of("containerName", containerName, "blobPath", blobPath), null, diff --git a/extensions-contrib/azure-extensions/src/test/java/io/druid/storage/azure/AzureDataSegmentPullerTest.java b/extensions-contrib/azure-extensions/src/test/java/io/druid/storage/azure/AzureDataSegmentPullerTest.java index 5a5eec038ce..b01e6b638bd 100644 --- a/extensions-contrib/azure-extensions/src/test/java/io/druid/storage/azure/AzureDataSegmentPullerTest.java +++ b/extensions-contrib/azure-extensions/src/test/java/io/druid/storage/azure/AzureDataSegmentPullerTest.java @@ -22,11 +22,11 @@ package io.druid.storage.azure; import com.google.common.collect.ImmutableMap; import com.microsoft.azure.storage.StorageException; import io.druid.java.util.common.FileUtils; +import io.druid.java.util.common.Intervals; import io.druid.segment.loading.SegmentLoadingException; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; import org.easymock.EasyMockSupport; -import org.joda.time.Interval; import org.junit.Before; import org.junit.Test; @@ -50,7 +50,7 @@ public class AzureDataSegmentPullerTest extends EasyMockSupport private static final String blobPath = "/path/to/storage/index.zip"; private static final DataSegment dataSegment = new DataSegment( "test", - new Interval("2015-04-12/2015-04-13"), + Intervals.of("2015-04-12/2015-04-13"), "1", ImmutableMap.of("containerName", containerName, "blobPath", blobPath), null, diff --git a/extensions-contrib/azure-extensions/src/test/java/io/druid/storage/azure/AzureDataSegmentPusherTest.java b/extensions-contrib/azure-extensions/src/test/java/io/druid/storage/azure/AzureDataSegmentPusherTest.java index 0f54dedf08a..af76f357142 100644 --- a/extensions-contrib/azure-extensions/src/test/java/io/druid/storage/azure/AzureDataSegmentPusherTest.java +++ b/extensions-contrib/azure-extensions/src/test/java/io/druid/storage/azure/AzureDataSegmentPusherTest.java @@ -26,12 +26,12 @@ import com.google.common.collect.Maps; import com.google.common.io.Files; import com.microsoft.azure.storage.StorageException; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.MapUtils; import io.druid.java.util.common.StringUtils; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; 
import org.easymock.EasyMockSupport; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Before; import org.junit.Rule; @@ -55,7 +55,7 @@ public class AzureDataSegmentPusherTest extends EasyMockSupport private static final String blobPath = "test/2015-04-12T00:00:00.000Z_2015-04-13T00:00:00.000Z/1/0/index.zip"; private static final DataSegment dataSegment = new DataSegment( "test", - new Interval("2015-04-12/2015-04-13"), + Intervals.of("2015-04-12/2015-04-13"), "1", ImmutableMap.of("containerName", containerName, "blobPath", blobPath), null, @@ -94,7 +94,7 @@ public class AzureDataSegmentPusherTest extends EasyMockSupport DataSegment segmentToPush = new DataSegment( "foo", - new Interval("2015/2016"), + Intervals.of("2015/2016"), "0", Maps.newHashMap(), Lists.newArrayList(), diff --git a/extensions-contrib/cloudfiles-extensions/src/test/java/io/druid/storage/cloudfiles/CloudFilesDataSegmentPusherTest.java b/extensions-contrib/cloudfiles-extensions/src/test/java/io/druid/storage/cloudfiles/CloudFilesDataSegmentPusherTest.java index 3014fcd5ad0..b257efb1282 100644 --- a/extensions-contrib/cloudfiles-extensions/src/test/java/io/druid/storage/cloudfiles/CloudFilesDataSegmentPusherTest.java +++ b/extensions-contrib/cloudfiles-extensions/src/test/java/io/druid/storage/cloudfiles/CloudFilesDataSegmentPusherTest.java @@ -23,13 +23,13 @@ import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.io.Files; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Intervals; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; import org.easymock.EasyMock; import org.jclouds.io.Payload; import org.jclouds.openstack.swift.v1.features.ObjectApi; import org.jclouds.rackspace.cloudfiles.v1.CloudFilesApi; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Rule; import org.junit.Test; @@ -74,7 +74,7 @@ public class CloudFilesDataSegmentPusherTest DataSegment segmentToPush = new DataSegment( "foo", - new Interval("2015/2016"), + Intervals.of("2015/2016"), "0", Maps.newHashMap(), Lists.newArrayList(), diff --git a/extensions-contrib/distinctcount/src/test/java/io/druid/query/aggregation/distinctcount/DistinctCountTimeseriesQueryTest.java b/extensions-contrib/distinctcount/src/test/java/io/druid/query/aggregation/distinctcount/DistinctCountTimeseriesQueryTest.java index eeb6dfbf85e..14e70157819 100644 --- a/extensions-contrib/distinctcount/src/test/java/io/druid/query/aggregation/distinctcount/DistinctCountTimeseriesQueryTest.java +++ b/extensions-contrib/distinctcount/src/test/java/io/druid/query/aggregation/distinctcount/DistinctCountTimeseriesQueryTest.java @@ -22,6 +22,7 @@ package io.druid.query.aggregation.distinctcount; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import io.druid.data.input.MapBasedInputRow; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Sequences; import io.druid.query.Druids; @@ -61,7 +62,7 @@ public class DistinctCountTimeseriesQueryTest String visitor_id = "visitor_id"; String client_type = "client_type"; - DateTime time = new DateTime("2016-03-04T00:00:00.000Z"); + DateTime time = DateTimes.of("2016-03-04T00:00:00.000Z"); long timestamp = time.getMillis(); index.add( new MapBasedInputRow( diff --git 
a/extensions-contrib/distinctcount/src/test/java/io/druid/query/aggregation/distinctcount/DistinctCountTopNQueryTest.java b/extensions-contrib/distinctcount/src/test/java/io/druid/query/aggregation/distinctcount/DistinctCountTopNQueryTest.java index 190d79ed9bd..3e6b6a6f5ae 100644 --- a/extensions-contrib/distinctcount/src/test/java/io/druid/query/aggregation/distinctcount/DistinctCountTopNQueryTest.java +++ b/extensions-contrib/distinctcount/src/test/java/io/druid/query/aggregation/distinctcount/DistinctCountTopNQueryTest.java @@ -22,9 +22,9 @@ package io.druid.query.aggregation.distinctcount; import com.google.common.base.Supplier; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; - import io.druid.collections.StupidPool; import io.druid.data.input.MapBasedInputRow; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Sequences; import io.druid.query.QueryRunnerTestHelper; @@ -79,7 +79,7 @@ public class DistinctCountTopNQueryTest String visitor_id = "visitor_id"; String client_type = "client_type"; - DateTime time = new DateTime("2016-03-04T00:00:00.000Z"); + DateTime time = DateTimes.of("2016-03-04T00:00:00.000Z"); long timestamp = time.getMillis(); index.add( new MapBasedInputRow( diff --git a/extensions-contrib/google-extensions/src/test/java/io/druid/storage/google/GoogleDataSegmentKillerTest.java b/extensions-contrib/google-extensions/src/test/java/io/druid/storage/google/GoogleDataSegmentKillerTest.java index 31dac78e5f3..3e74a87e689 100644 --- a/extensions-contrib/google-extensions/src/test/java/io/druid/storage/google/GoogleDataSegmentKillerTest.java +++ b/extensions-contrib/google-extensions/src/test/java/io/druid/storage/google/GoogleDataSegmentKillerTest.java @@ -20,11 +20,11 @@ package io.druid.storage.google; import com.google.common.collect.ImmutableMap; +import io.druid.java.util.common.Intervals; import io.druid.segment.loading.SegmentLoadingException; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; import org.easymock.EasyMockSupport; -import org.joda.time.Interval; import org.junit.Before; import org.junit.Test; @@ -39,7 +39,7 @@ public class GoogleDataSegmentKillerTest extends EasyMockSupport private static final DataSegment dataSegment = new DataSegment( "test", - new Interval("2015-04-12/2015-04-13"), + Intervals.of("2015-04-12/2015-04-13"), "1", ImmutableMap.of("bucket", bucket, "path", indexPath), null, diff --git a/extensions-contrib/google-extensions/src/test/java/io/druid/storage/google/GoogleDataSegmentPullerTest.java b/extensions-contrib/google-extensions/src/test/java/io/druid/storage/google/GoogleDataSegmentPullerTest.java index a6d3facfd9c..1504a20333f 100644 --- a/extensions-contrib/google-extensions/src/test/java/io/druid/storage/google/GoogleDataSegmentPullerTest.java +++ b/extensions-contrib/google-extensions/src/test/java/io/druid/storage/google/GoogleDataSegmentPullerTest.java @@ -21,11 +21,11 @@ package io.druid.storage.google; import com.google.common.collect.ImmutableMap; import io.druid.java.util.common.FileUtils; +import io.druid.java.util.common.Intervals; import io.druid.segment.loading.SegmentLoadingException; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; import org.easymock.EasyMockSupport; -import org.joda.time.Interval; import org.junit.Test; import java.io.File; @@ -42,7 +42,7 @@ public class GoogleDataSegmentPullerTest extends 
EasyMockSupport private static final String path = "/path/to/storage/index.zip"; private static final DataSegment dataSegment = new DataSegment( "test", - new Interval("2015-04-12/2015-04-13"), + Intervals.of("2015-04-12/2015-04-13"), "1", ImmutableMap.of("bucket", bucket, "path", path), null, diff --git a/extensions-contrib/google-extensions/src/test/java/io/druid/storage/google/GoogleDataSegmentPusherTest.java b/extensions-contrib/google-extensions/src/test/java/io/druid/storage/google/GoogleDataSegmentPusherTest.java index 83d35601ff1..6c845d433c7 100644 --- a/extensions-contrib/google-extensions/src/test/java/io/druid/storage/google/GoogleDataSegmentPusherTest.java +++ b/extensions-contrib/google-extensions/src/test/java/io/druid/storage/google/GoogleDataSegmentPusherTest.java @@ -25,11 +25,11 @@ import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.io.Files; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Intervals; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; import org.easymock.EasyMock; import org.easymock.EasyMockSupport; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Before; import org.junit.Rule; @@ -50,7 +50,7 @@ public class GoogleDataSegmentPusherTest extends EasyMockSupport private static final String path = "prefix/test/2015-04-12T00:00:00.000Z_2015-04-13T00:00:00.000Z/1/0/index.zip"; private static final DataSegment dataSegment = new DataSegment( "test", - new Interval("2015-04-12/2015-04-13"), + Intervals.of("2015-04-12/2015-04-13"), "1", ImmutableMap.of("bucket", bucket, "path", path), null, @@ -87,7 +87,7 @@ public class GoogleDataSegmentPusherTest extends EasyMockSupport DataSegment segmentToPush = new DataSegment( "foo", - new Interval("2015/2016"), + Intervals.of("2015/2016"), "0", Maps.newHashMap(), Lists.newArrayList(), diff --git a/extensions-contrib/graphite-emitter/src/test/java/io/druid/emitter/graphite/WhiteListBasedConverterTest.java b/extensions-contrib/graphite-emitter/src/test/java/io/druid/emitter/graphite/WhiteListBasedConverterTest.java index 4a93f9ec5d3..165254378c5 100644 --- a/extensions-contrib/graphite-emitter/src/test/java/io/druid/emitter/graphite/WhiteListBasedConverterTest.java +++ b/extensions-contrib/graphite-emitter/src/test/java/io/druid/emitter/graphite/WhiteListBasedConverterTest.java @@ -22,6 +22,7 @@ package io.druid.emitter.graphite; import com.google.common.collect.Maps; import com.metamx.emitter.service.ServiceMetricEvent; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; import junitparams.JUnitParamsRunner; import junitparams.Parameters; import org.easymock.EasyMock; @@ -44,7 +45,7 @@ public class WhiteListBasedConverterTest new DefaultObjectMapper() ); private ServiceMetricEvent event; - private DateTime createdTime = new DateTime(); + private DateTime createdTime = DateTimes.nowUtc(); private String hostname = "testHost.yahoo.com:8080"; private String serviceName = "historical"; private String defaultNamespace = prefix + "." + serviceName + "." 
+ GraphiteEmitter.sanitize(hostname); diff --git a/extensions-contrib/orc-extensions/src/test/java/io/druid/data/input/orc/DruidOrcInputFormatTest.java b/extensions-contrib/orc-extensions/src/test/java/io/druid/data/input/orc/DruidOrcInputFormatTest.java index 9b0939219b3..b0cb778c302 100644 --- a/extensions-contrib/orc-extensions/src/test/java/io/druid/data/input/orc/DruidOrcInputFormatTest.java +++ b/extensions-contrib/orc-extensions/src/test/java/io/druid/data/input/orc/DruidOrcInputFormatTest.java @@ -18,9 +18,10 @@ */ package io.druid.data.input.orc; -import io.druid.java.util.common.StringUtils; import io.druid.data.input.MapBasedInputRow; import io.druid.indexer.HadoopDruidIndexerConfig; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.StringUtils; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; @@ -41,7 +42,6 @@ import org.apache.orc.CompressionKind; import org.apache.orc.OrcFile; import org.apache.orc.TypeDescription; import org.apache.orc.Writer; -import org.joda.time.DateTime; import org.junit.Assert; import org.junit.Before; import org.junit.Rule; @@ -101,7 +101,7 @@ public class DruidOrcInputFormatTest MapBasedInputRow row = (MapBasedInputRow) parser.parse(data); Assert.assertTrue(row.getEvent().keySet().size() == 4); - Assert.assertEquals(new DateTime(timestamp), row.getTimestamp()); + Assert.assertEquals(DateTimes.of(timestamp), row.getTimestamp()); Assert.assertEquals(parser.getParseSpec().getDimensionsSpec().getDimensionNames(), row.getDimensions()); Assert.assertEquals(col1, row.getEvent().get("col1")); Assert.assertEquals(Arrays.asList(col2), row.getDimension("col2")); diff --git a/extensions-contrib/orc-extensions/src/test/java/io/druid/data/input/orc/OrcHadoopInputRowParserTest.java b/extensions-contrib/orc-extensions/src/test/java/io/druid/data/input/orc/OrcHadoopInputRowParserTest.java index 93d57e55ef6..cc53a6db4a7 100644 --- a/extensions-contrib/orc-extensions/src/test/java/io/druid/data/input/orc/OrcHadoopInputRowParserTest.java +++ b/extensions-contrib/orc-extensions/src/test/java/io/druid/data/input/orc/OrcHadoopInputRowParserTest.java @@ -36,6 +36,7 @@ import io.druid.data.input.impl.TimestampSpec; import io.druid.guice.GuiceInjectors; import io.druid.initialization.Initialization; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; import org.apache.hadoop.hive.ql.io.orc.OrcStruct; import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; import org.apache.hadoop.hive.serde2.objectinspector.SettableStructObjectInspector; @@ -43,7 +44,6 @@ import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils; import org.apache.hadoop.io.FloatWritable; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Text; -import org.joda.time.DateTime; import org.junit.Assert; import org.junit.Before; import org.junit.Test; @@ -167,7 +167,7 @@ public class OrcHadoopInputRowParserTest oi.setStructFieldData(struct, oi.getStructFieldRef("col6"), null); final InputRow row = parser.parse(struct); - Assert.assertEquals("timestamp", new DateTime("2000-01-01"), row.getTimestamp()); + Assert.assertEquals("timestamp", DateTimes.of("2000-01-01"), row.getTimestamp()); Assert.assertEquals("col1", "foo", row.getRaw("col1")); Assert.assertEquals("col2", ImmutableList.of("foo", "bar"), row.getRaw("col2")); Assert.assertEquals("col3", 1.0f, row.getRaw("col3")); diff --git 
a/extensions-contrib/orc-extensions/src/test/java/io/druid/data/input/orc/OrcIndexGeneratorJobTest.java b/extensions-contrib/orc-extensions/src/test/java/io/druid/data/input/orc/OrcIndexGeneratorJobTest.java index ff301b55b88..0cbac888e92 100644 --- a/extensions-contrib/orc-extensions/src/test/java/io/druid/data/input/orc/OrcIndexGeneratorJobTest.java +++ b/extensions-contrib/orc-extensions/src/test/java/io/druid/data/input/orc/OrcIndexGeneratorJobTest.java @@ -25,7 +25,6 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.io.Files; -import io.druid.java.util.common.StringUtils; import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.InputRowParser; import io.druid.data.input.impl.TimeAndDimsParseSpec; @@ -38,6 +37,8 @@ import io.druid.indexer.HadoopyShardSpec; import io.druid.indexer.IndexGeneratorJob; import io.druid.indexer.JobHelper; import io.druid.indexer.Jobby; +import io.druid.java.util.common.Intervals; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.Granularities; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.LongSumAggregatorFactory; @@ -117,7 +118,7 @@ public class OrcIndexGeneratorJobTest "2014102212,i.example.com,963", "2014102212,j.example.com,333" ); - private final Interval interval = new Interval("2014-10-22T00:00:00Z/P1D"); + private final Interval interval = Intervals.of("2014-10-22T00:00:00Z/P1D"); private File dataRoot; private File outputRoot; private Integer[][][] shardInfoForEachSegment = new Integer[][][]{{ diff --git a/extensions-contrib/parquet-extensions/src/main/java/io/druid/data/input/parquet/ParquetHadoopInputRowParser.java b/extensions-contrib/parquet-extensions/src/main/java/io/druid/data/input/parquet/ParquetHadoopInputRowParser.java index 2b5b705ca29..a1d3b3a2fa6 100755 --- a/extensions-contrib/parquet-extensions/src/main/java/io/druid/data/input/parquet/ParquetHadoopInputRowParser.java +++ b/extensions-contrib/parquet-extensions/src/main/java/io/druid/data/input/parquet/ParquetHadoopInputRowParser.java @@ -29,6 +29,7 @@ import io.druid.data.input.impl.DimensionSchema; import io.druid.data.input.impl.InputRowParser; import io.druid.data.input.impl.ParseSpec; import io.druid.data.input.impl.TimestampSpec; +import io.druid.java.util.common.DateTimes; import org.apache.avro.LogicalType; import org.apache.avro.LogicalTypes; import org.apache.avro.Schema; @@ -92,7 +93,7 @@ public class ParquetHadoopInputRowParser implements InputRowParser actual = new ImmutableList.Builder<>(); diff --git a/extensions-contrib/statsd-emitter/src/test/java/StatsDEmitterTest.java b/extensions-contrib/statsd-emitter/src/test/java/StatsDEmitterTest.java index a6286e46504..0f5694038d9 100644 --- a/extensions-contrib/statsd-emitter/src/test/java/StatsDEmitterTest.java +++ b/extensions-contrib/statsd-emitter/src/test/java/StatsDEmitterTest.java @@ -22,14 +22,13 @@ import com.metamx.emitter.service.ServiceMetricEvent; import com.timgroup.statsd.StatsDClient; import io.druid.emitter.statsd.StatsDEmitter; import io.druid.emitter.statsd.StatsDEmitterConfig; +import io.druid.java.util.common.DateTimes; +import org.junit.Test; import static org.easymock.EasyMock.createMock; import static org.easymock.EasyMock.replay; import static org.easymock.EasyMock.verify; -import org.joda.time.DateTime; -import org.junit.Test; - /** */ public class StatsDEmitterTest @@ -47,7 +46,7 @@ public 
class StatsDEmitterTest replay(client); emitter.emit(new ServiceMetricEvent.Builder() .setDimension("dataSource", "data-source") - .build(new DateTime(), "query/cache/total/hitRate", 0.54) + .build(DateTimes.nowUtc(), "query/cache/total/hitRate", 0.54) .build("broker", "brokerHost1") ); verify(client); @@ -75,7 +74,7 @@ public class StatsDEmitterTest .setDimension("remoteAddress", "194.0.90.2") .setDimension("id", "ID") .setDimension("context", "{context}") - .build(new DateTime(), "query/time", 10) + .build(DateTimes.nowUtc(), "query/time", 10) .build("broker", "brokerHost1") ); verify(client); @@ -103,7 +102,7 @@ public class StatsDEmitterTest .setDimension("remoteAddress", "194.0.90.2") .setDimension("id", "ID") .setDimension("context", "{context}") - .build(new DateTime(), "query/time", 10) + .build(DateTimes.nowUtc(), "query/time", 10) .build("broker", "brokerHost1") ); verify(client); diff --git a/extensions-contrib/time-min-max/src/main/java/io/druid/query/aggregation/TimestampAggregatorFactory.java b/extensions-contrib/time-min-max/src/main/java/io/druid/query/aggregation/TimestampAggregatorFactory.java index 4466aa92a51..1a27163b5ea 100644 --- a/extensions-contrib/time-min-max/src/main/java/io/druid/query/aggregation/TimestampAggregatorFactory.java +++ b/extensions-contrib/time-min-max/src/main/java/io/druid/query/aggregation/TimestampAggregatorFactory.java @@ -21,8 +21,9 @@ package io.druid.query.aggregation; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.primitives.Longs; -import io.druid.java.util.common.StringUtils; import io.druid.data.input.impl.TimestampSpec; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.StringUtils; import io.druid.segment.ColumnSelectorFactory; import org.joda.time.DateTime; @@ -116,7 +117,7 @@ public class TimestampAggregatorFactory extends AggregatorFactory @Override public Object finalizeComputation(Object object) { - return new DateTime((long) object); + return DateTimes.utc((long) object); } @Override diff --git a/extensions-contrib/virtual-columns/src/test/java/io/druid/segment/MapVirtualColumnTest.java b/extensions-contrib/virtual-columns/src/test/java/io/druid/segment/MapVirtualColumnTest.java index bc5a3629895..d930b862a99 100644 --- a/extensions-contrib/virtual-columns/src/test/java/io/druid/segment/MapVirtualColumnTest.java +++ b/extensions-contrib/virtual-columns/src/test/java/io/druid/segment/MapVirtualColumnTest.java @@ -30,6 +30,7 @@ import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.StringInputRowParser; import io.druid.data.input.impl.TimestampSpec; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.guava.Sequences; import io.druid.query.Druids; import io.druid.query.QueryPlus; @@ -46,7 +47,6 @@ import io.druid.query.select.SelectQueryRunnerFactory; import io.druid.query.select.SelectResultValue; import io.druid.segment.incremental.IncrementalIndex; import io.druid.segment.incremental.IncrementalIndexSchema; -import org.joda.time.DateTime; import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; @@ -85,7 +85,7 @@ public class MapVirtualColumnTest ); final IncrementalIndexSchema schema = new IncrementalIndexSchema.Builder() - .withMinTimestamp(new DateTime("2011-01-12T00:00:00.000Z").getMillis()) + .withMinTimestamp(DateTimes.of("2011-01-12T00:00:00.000Z").getMillis()) .build(); final IncrementalIndex index = new IncrementalIndex.Builder() 
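// StatsDEmitterTest and TimestampAggregatorFactory above swap the zero-argument
// new DateTime() and the new DateTime(long) constructor, both of which attach
// the JVM's default chronology, for UTC-only helpers. Sketch assumed from the
// call sites in this patch:
import io.druid.java.util.common.DateTimes;
import org.joda.time.DateTime;

class NowAndMillisSketch
{
  // instead of new DateTime(): current instant, fixed to UTC
  static DateTime now()
  {
    return DateTimes.nowUtc();
  }

  // instead of new DateTime(long), as in TimestampAggregatorFactory above
  static DateTime fromMillis(long millis)
  {
    return DateTimes.utc(millis);
  }
}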
.setIndexSchema(schema) diff --git a/extensions-core/avro-extensions/src/test/java/io/druid/data/input/AvroStreamInputRowParserTest.java b/extensions-core/avro-extensions/src/test/java/io/druid/data/input/AvroStreamInputRowParserTest.java index 607d6b71dc5..f062a3acd95 100644 --- a/extensions-core/avro-extensions/src/test/java/io/druid/data/input/AvroStreamInputRowParserTest.java +++ b/extensions-core/avro-extensions/src/test/java/io/druid/data/input/AvroStreamInputRowParserTest.java @@ -39,6 +39,7 @@ import org.apache.avro.generic.GenericRecord; import org.apache.avro.io.DatumWriter; import org.apache.avro.io.EncoderFactory; import org.joda.time.DateTime; +import org.joda.time.chrono.ISOChronology; import org.junit.Before; import org.junit.Test; import org.schemarepo.InMemoryRepository; @@ -75,7 +76,7 @@ public class AvroStreamInputRowParserTest public static final float SOME_FLOAT_VALUE = 0.23555f; public static final int SOME_INT_VALUE = 1; public static final long SOME_LONG_VALUE = 679865987569912369L; - public static final DateTime DATE_TIME = new DateTime(2015, 10, 25, 19, 30); + public static final DateTime DATE_TIME = new DateTime(2015, 10, 25, 19, 30, ISOChronology.getInstanceUTC()); public static final List DIMENSIONS = Arrays.asList(EVENT_TYPE, ID, SOME_OTHER_ID, IS_VALID); public static final TimeAndDimsParseSpec PARSE_SPEC = new TimeAndDimsParseSpec( new TimestampSpec("timestamp", "millis", null), diff --git a/extensions-core/datasketches/src/test/java/io/druid/query/aggregation/datasketches/theta/SketchAggregationWithSimpleDataTest.java b/extensions-core/datasketches/src/test/java/io/druid/query/aggregation/datasketches/theta/SketchAggregationWithSimpleDataTest.java index b4b10c0e585..2583229d6ef 100644 --- a/extensions-core/datasketches/src/test/java/io/druid/query/aggregation/datasketches/theta/SketchAggregationWithSimpleDataTest.java +++ b/extensions-core/datasketches/src/test/java/io/druid/query/aggregation/datasketches/theta/SketchAggregationWithSimpleDataTest.java @@ -26,6 +26,7 @@ import com.google.common.collect.Lists; import com.google.common.io.Files; import io.druid.data.input.MapBasedRow; import io.druid.data.input.Row; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; @@ -221,7 +222,7 @@ public class SketchAggregationWithSimpleDataTest Sequences.toList(seq, Lists.newArrayList()) ); - Assert.assertEquals(new DateTime("2014-10-20T00:00:00.000Z"), result.getTimestamp()); + Assert.assertEquals(DateTimes.of("2014-10-20T00:00:00.000Z"), result.getTimestamp()); Assert.assertEquals(50.0, result.getValue().getDoubleMetric("sketch_count"), 0.01); Assert.assertEquals(50.0, result.getValue().getDoubleMetric("sketchEstimatePostAgg"), 0.01); @@ -249,7 +250,7 @@ public class SketchAggregationWithSimpleDataTest Sequences.toList(seq, Lists.newArrayList()) ); - Assert.assertEquals(new DateTime("2014-10-20T00:00:00.000Z"), result.getTimestamp()); + Assert.assertEquals(DateTimes.of("2014-10-20T00:00:00.000Z"), result.getTimestamp()); DimensionAndMetricValueExtractor value = Iterables.getOnlyElement(result.getValue().getValue()); Assert.assertEquals(38.0, value.getDoubleMetric("sketch_count"), 0.01); @@ -277,7 +278,7 @@ public class SketchAggregationWithSimpleDataTest ); Result result = (Result) Iterables.getOnlyElement(Sequences.toList(seq, Lists.newArrayList())); - Assert.assertEquals(new DateTime("2014-10-20T00:00:00.000Z"), 
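// Where a test genuinely needs the field-based Joda constructor
// (year, month, day, ...), as in AvroStreamInputRowParserTest above, the patch
// keeps the constructor but adds an explicit UTC chronology so the fields are
// not reinterpreted in the local zone. Minimal sketch of that pattern:
import org.joda.time.DateTime;
import org.joda.time.chrono.ISOChronology;

class ExplicitChronologySketch
{
  // The year/month/day/hour/minute fields are now pinned to UTC instead of
  // whatever zone the test machine happens to use.
  static final DateTime FIXED = new DateTime(2015, 10, 25, 19, 30, ISOChronology.getInstanceUTC());
}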
result.getTimestamp()); + Assert.assertEquals(DateTimes.of("2014-10-20T00:00:00.000Z"), result.getTimestamp()); Assert.assertEquals(100, result.getValue().getEvents().size()); Assert.assertEquals("AgMDAAAazJMCAAAAAACAPzz9j7pWTMdROWGf15uY1nI=", result.getValue().getEvents().get(0).getEvent().get("pty_country")); } diff --git a/extensions-core/hdfs-storage/src/test/java/io/druid/segment/loading/HdfsDataSegmentFinderTest.java b/extensions-core/hdfs-storage/src/test/java/io/druid/segment/loading/HdfsDataSegmentFinderTest.java index cc0e19929ae..70b5e0c5906 100644 --- a/extensions-core/hdfs-storage/src/test/java/io/druid/segment/loading/HdfsDataSegmentFinderTest.java +++ b/extensions-core/hdfs-storage/src/test/java/io/druid/segment/loading/HdfsDataSegmentFinderTest.java @@ -25,6 +25,7 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import io.druid.jackson.DefaultObjectMapper; import io.druid.java.util.common.IOE; +import io.druid.java.util.common.Intervals; import io.druid.storage.hdfs.HdfsDataSegmentFinder; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NumberedShardSpec; @@ -36,7 +37,6 @@ import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hdfs.MiniDFSCluster; -import org.joda.time.Interval; import org.junit.AfterClass; import org.junit.Assert; import org.junit.Before; @@ -56,80 +56,62 @@ public class HdfsDataSegmentFinderTest private static final ObjectMapper mapper = new DefaultObjectMapper(); private static final String DESCRIPTOR_JSON = "descriptor.json"; private static final String INDEX_ZIP = "index.zip"; - private static final DataSegment SEGMENT_1 = DataSegment.builder() - .dataSource("wikipedia") - .interval( - new Interval( - "2013-08-31T00:00:00.000Z/2013-09-01T00:00:00.000Z" - ) - ) - .version("2015-10-21T22:07:57.074Z") - .loadSpec( - ImmutableMap.of( - "type", - "hdfs", - "path", - "hdfs://abc.com:1234/somewhere/index.zip" - ) - ) - .dimensions(ImmutableList.of("language", "page")) - .metrics(ImmutableList.of("count")) - .build(); + private static final DataSegment SEGMENT_1 = DataSegment + .builder() + .dataSource("wikipedia") + .interval(Intervals.of("2013-08-31T00:00:00.000Z/2013-09-01T00:00:00.000Z")) + .version("2015-10-21T22:07:57.074Z") + .loadSpec( + ImmutableMap.of( + "type", + "hdfs", + "path", + "hdfs://abc.com:1234/somewhere/index.zip" + ) + ) + .dimensions(ImmutableList.of("language", "page")) + .metrics(ImmutableList.of("count")) + .build(); - private static final DataSegment SEGMENT_2 = DataSegment.builder(SEGMENT_1) - .interval( - new Interval( - "2013-09-01T00:00:00.000Z/2013-09-02T00:00:00.000Z" - ) - ) - .build(); + private static final DataSegment SEGMENT_2 = DataSegment + .builder(SEGMENT_1) + .interval(Intervals.of("2013-09-01T00:00:00.000Z/2013-09-02T00:00:00.000Z")) + .build(); - private static final DataSegment SEGMENT_3 = DataSegment.builder(SEGMENT_1) - .interval( - new Interval( - "2013-09-02T00:00:00.000Z/2013-09-03T00:00:00.000Z" - ) - ) - .version("2015-10-22T22:07:57.074Z") - .build(); + private static final DataSegment SEGMENT_3 = DataSegment + .builder(SEGMENT_1) + .interval(Intervals.of("2013-09-02T00:00:00.000Z/2013-09-03T00:00:00.000Z")) + .version("2015-10-22T22:07:57.074Z") + .build(); - private static final DataSegment SEGMENT_4_0 = DataSegment.builder(SEGMENT_1) - .interval( - new Interval( - "2013-09-02T00:00:00.000Z/2013-09-03T00:00:00.000Z" - ) - ) - .shardSpec(new 
NumberedShardSpec(0, 2)) - .build(); + private static final DataSegment SEGMENT_4_0 = DataSegment + .builder(SEGMENT_1) + .interval(Intervals.of("2013-09-02T00:00:00.000Z/2013-09-03T00:00:00.000Z")) + .shardSpec(new NumberedShardSpec(0, 2)) + .build(); - private static final DataSegment SEGMENT_4_1 = DataSegment.builder(SEGMENT_1) - .interval( - new Interval( - "2013-09-02T00:00:00.000Z/2013-09-03T00:00:00.000Z" - ) - ) - .shardSpec(new NumberedShardSpec(1, 2)) - .build(); + private static final DataSegment SEGMENT_4_1 = DataSegment + .builder(SEGMENT_1) + .interval(Intervals.of("2013-09-02T00:00:00.000Z/2013-09-03T00:00:00.000Z")) + .shardSpec(new NumberedShardSpec(1, 2)) + .build(); - private static final DataSegment SEGMENT_5 = DataSegment.builder() - .dataSource("wikipedia") - .interval( - new Interval( - "2013-09-03T00:00:00.000Z/2013-09-04T00:00:00.000Z" - ) - ) - .version("2015-10-21T22:07:57.074Z") - .loadSpec( - ImmutableMap.of( - "type", - "hdfs", - "path", - "hdfs://abc.com:1234/somewhere/1_index.zip" - ) - ) - .dimensions(ImmutableList.of("language", "page")) - .metrics(ImmutableList.of("count")) - .build(); + private static final DataSegment SEGMENT_5 = DataSegment + .builder() + .dataSource("wikipedia") + .interval(Intervals.of("2013-09-03T00:00:00.000Z/2013-09-04T00:00:00.000Z")) + .version("2015-10-21T22:07:57.074Z") + .loadSpec( + ImmutableMap.of( + "type", + "hdfs", + "path", + "hdfs://abc.com:1234/somewhere/1_index.zip" + ) + ) + .dimensions(ImmutableList.of("language", "page")) + .metrics(ImmutableList.of("count")) + .build(); private static MiniDFSCluster miniCluster; private static File hdfsTmpDir; diff --git a/extensions-core/hdfs-storage/src/test/java/io/druid/storage/hdfs/HdfsDataSegmentKillerTest.java b/extensions-core/hdfs-storage/src/test/java/io/druid/storage/hdfs/HdfsDataSegmentKillerTest.java index d9118bf2292..bdf499dcd90 100644 --- a/extensions-core/hdfs-storage/src/test/java/io/druid/storage/hdfs/HdfsDataSegmentKillerTest.java +++ b/extensions-core/hdfs-storage/src/test/java/io/druid/storage/hdfs/HdfsDataSegmentKillerTest.java @@ -21,6 +21,7 @@ package io.druid.storage.hdfs; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.StringUtils; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; @@ -28,7 +29,6 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; @@ -183,7 +183,7 @@ public class HdfsDataSegmentKillerTest { return new DataSegment( "dataSource", - Interval.parse("2000/3000"), + Intervals.of("2000/3000"), "ver", ImmutableMap.of( "type", "hdfs", diff --git a/extensions-core/hdfs-storage/src/test/java/io/druid/storage/hdfs/HdfsDataSegmentPusherTest.java b/extensions-core/hdfs-storage/src/test/java/io/druid/storage/hdfs/HdfsDataSegmentPusherTest.java index e12f0ed516c..f5baebb9339 100644 --- a/extensions-core/hdfs-storage/src/test/java/io/druid/storage/hdfs/HdfsDataSegmentPusherTest.java +++ b/extensions-core/hdfs-storage/src/test/java/io/druid/storage/hdfs/HdfsDataSegmentPusherTest.java @@ -20,7 +20,6 @@ package io.druid.storage.hdfs; import com.fasterxml.jackson.core.JsonParser; -import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.DeserializationContext; 
import com.fasterxml.jackson.databind.DeserializationFeature; import com.fasterxml.jackson.databind.InjectableValues; @@ -42,6 +41,7 @@ import io.druid.indexer.HadoopIngestionSpec; import io.druid.indexer.JobHelper; import io.druid.jackson.DefaultObjectMapper; import io.druid.jackson.GranularityModule; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.StringUtils; import io.druid.segment.loading.LocalDataSegmentPusher; import io.druid.segment.loading.LocalDataSegmentPusherConfig; @@ -57,6 +57,7 @@ import org.apache.hadoop.mapreduce.TaskAttemptID; import org.apache.hadoop.mapreduce.TaskType; import org.joda.time.DateTime; import org.joda.time.Interval; +import org.joda.time.chrono.ISOChronology; import org.junit.Assert; import org.junit.Before; import org.junit.Rule; @@ -147,7 +148,7 @@ public class HdfsDataSegmentPusherTest DataSegment segmentToPush = new DataSegment( "foo", - new Interval("2015/2016"), + Intervals.of("2015/2016"), "0", Maps.newHashMap(), Lists.newArrayList(), @@ -230,7 +231,7 @@ public class HdfsDataSegmentPusherTest for (int i = 0; i < numberOfSegments; i++) { segments[i] = new DataSegment( "foo", - new Interval("2015/2016"), + Intervals.of("2015/2016"), "0", Maps.newHashMap(), Lists.newArrayList(), @@ -337,11 +338,10 @@ public class HdfsDataSegmentPusherTest Interval.class, new StdDeserializer(Interval.class) { @Override - public Interval deserialize( - JsonParser jsonParser, DeserializationContext deserializationContext - ) throws IOException, JsonProcessingException + public Interval deserialize(JsonParser jsonParser, DeserializationContext deserializationContext) + throws IOException { - return new Interval(jsonParser.getText()); + return Intervals.of(jsonParser.getText()); } } ); @@ -353,7 +353,7 @@ public class HdfsDataSegmentPusherTest public void shouldNotHaveColonsInHdfsStorageDir() throws Exception { - Interval interval = new Interval("2011-10-01/2011-10-02"); + Interval interval = Intervals.of("2011-10-01/2011-10-02"); ImmutableMap loadSpec = ImmutableMap.of("something", "or_other"); DataSegment segment = new DataSegment( @@ -414,7 +414,7 @@ public class HdfsDataSegmentPusherTest ) ); - Bucket bucket = new Bucket(4711, new DateTime(2012, 07, 10, 5, 30), 4712); + Bucket bucket = new Bucket(4711, new DateTime(2012, 07, 10, 5, 30, ISOChronology.getInstanceUTC()), 4712); Path path = JobHelper.makeFileNamePath( new Path(cfg.getSchema().getIOConfig().getSegmentOutputPath()), new DistributedFileSystem(), @@ -524,7 +524,7 @@ public class HdfsDataSegmentPusherTest ) ); - Bucket bucket = new Bucket(4711, new DateTime(2012, 07, 10, 5, 30), 4712); + Bucket bucket = new Bucket(4711, new DateTime(2012, 07, 10, 5, 30, ISOChronology.getInstanceUTC()), 4712); Path path = JobHelper.makeFileNamePath( new Path(cfg.getSchema().getIOConfig().getSegmentOutputPath()), new LocalFileSystem(), diff --git a/extensions-core/histogram/src/main/java/io/druid/query/aggregation/histogram/sql/QuantileSqlAggregator.java b/extensions-core/histogram/src/main/java/io/druid/query/aggregation/histogram/sql/QuantileSqlAggregator.java index d0578bb55e3..8ecda69327c 100644 --- a/extensions-core/histogram/src/main/java/io/druid/query/aggregation/histogram/sql/QuantileSqlAggregator.java +++ b/extensions-core/histogram/src/main/java/io/druid/query/aggregation/histogram/sql/QuantileSqlAggregator.java @@ -183,7 +183,7 @@ public class QuantileSqlAggregator implements SqlAggregator } } else { final ExpressionVirtualColumn virtualColumn = input.toVirtualColumn( - 
String.format("%s:v", name), + StringUtils.format("%s:v", name), ValueType.FLOAT, plannerContext.getExprMacroTable() ); diff --git a/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramTopNQueryTest.java b/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramTopNQueryTest.java index e42b0f4a7ac..b48ea4b6708 100644 --- a/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramTopNQueryTest.java +++ b/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramTopNQueryTest.java @@ -24,6 +24,7 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import io.druid.collections.StupidPool; +import io.druid.java.util.common.DateTimes; import io.druid.query.QueryPlus; import io.druid.query.QueryRunner; import io.druid.query.QueryRunnerTestHelper; @@ -40,7 +41,6 @@ import io.druid.query.topn.TopNQueryQueryToolChest; import io.druid.query.topn.TopNQueryRunnerFactory; import io.druid.query.topn.TopNResultValue; import io.druid.segment.TestHelper; -import org.joda.time.DateTime; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; @@ -146,7 +146,7 @@ public class ApproximateHistogramTopNQueryTest List> expectedResults = Collections.singletonList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() diff --git a/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/KafkaIndexTask.java b/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/KafkaIndexTask.java index 68450cc5066..a496a0bb16b 100644 --- a/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/KafkaIndexTask.java +++ b/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/KafkaIndexTask.java @@ -55,6 +55,7 @@ import io.druid.indexing.common.actions.TaskActionClient; import io.druid.indexing.common.task.AbstractTask; import io.druid.indexing.common.task.RealtimeIndexTask; import io.druid.indexing.common.task.TaskResource; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.ISE; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.guava.Sequence; @@ -266,7 +267,7 @@ public class KafkaIndexTask extends AbstractTask implements ChatHandler public TaskStatus run(final TaskToolbox toolbox) throws Exception { log.info("Starting up!"); - startTime = DateTime.now(); + startTime = DateTimes.nowUtc(); mapper = toolbox.getObjectMapper(); status = Status.STARTING; diff --git a/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/supervisor/KafkaSupervisor.java b/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/supervisor/KafkaSupervisor.java index eedeed801e7..d3c225095ef 100644 --- a/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/supervisor/KafkaSupervisor.java +++ b/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/supervisor/KafkaSupervisor.java @@ -65,6 +65,7 @@ import io.druid.indexing.overlord.TaskRunnerWorkItem; import io.druid.indexing.overlord.TaskStorage; import io.druid.indexing.overlord.supervisor.Supervisor; import io.druid.indexing.overlord.supervisor.SupervisorReport; +import 
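// The QuantileSqlAggregator hunk above also trades String.format() for
// StringUtils.format(). The no-Locale String.format() overload formats with
// the default locale; the Druid helper (signature assumed from its use in this
// patch, presumably pinned to a fixed locale) removes that dependence. Sketch:
import io.druid.java.util.common.StringUtils;

class LocaleSafeFormatSketch
{
  static String virtualColumnName(String baseName)
  {
    // instead of String.format("%s:v", baseName)
    return StringUtils.format("%s:v", baseName);
  }
}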
io.druid.java.util.common.DateTimes; import io.druid.java.util.common.IAE; import io.druid.java.util.common.ISE; import io.druid.java.util.common.StringUtils; @@ -349,7 +350,7 @@ public class KafkaSupervisor implements Supervisor } } ); - firstRunTime = DateTime.now().plus(ioConfig.getStartDelay()); + firstRunTime = DateTimes.nowUtc().plus(ioConfig.getStartDelay()); scheduledExec.scheduleAtFixedRate( buildRunTask(), ioConfig.getStartDelay().getMillis(), @@ -666,7 +667,7 @@ public class KafkaSupervisor implements Supervisor if (taskInfoProvider.getTaskLocation(entry.getKey()).equals(TaskLocation.unknown())) { killTask(entry.getKey()); } else { - entry.getValue().startTime = new DateTime(0); + entry.getValue().startTime = DateTimes.EPOCH; } } } @@ -969,7 +970,7 @@ public class KafkaSupervisor implements Supervisor TaskGroup newTaskGroup = new TaskGroup(ImmutableMap.copyOf(startingPartitions), Optional.absent(), Optional.absent()); newTaskGroup.tasks.put(taskId, new TaskData()); - newTaskGroup.completionTimeout = DateTime.now().plus(ioConfig.getCompletionTimeout()); + newTaskGroup.completionTimeout = DateTimes.nowUtc().plus(ioConfig.getCompletionTimeout()); taskGroupList.add(newTaskGroup); } @@ -1052,7 +1053,7 @@ public class KafkaSupervisor implements Supervisor TaskGroup group = entry.getValue(); // find the longest running task from this group - DateTime earliestTaskStart = DateTime.now(); + DateTime earliestTaskStart = DateTimes.nowUtc(); for (TaskData taskData : group.tasks.values()) { if (earliestTaskStart.isAfter(taskData.startTime)) { earliestTaskStart = taskData.startTime; @@ -1075,7 +1076,7 @@ public class KafkaSupervisor implements Supervisor if (endOffsets != null) { // set a timeout and put this group in pendingCompletionTaskGroups so that it can be monitored for completion - group.completionTimeout = DateTime.now().plus(ioConfig.getCompletionTimeout()); + group.completionTimeout = DateTimes.nowUtc().plus(ioConfig.getCompletionTimeout()); pendingCompletionTaskGroups.putIfAbsent(groupId, Lists.newCopyOnWriteArrayList()); pendingCompletionTaskGroups.get(groupId).add(group); @@ -1362,11 +1363,11 @@ public class KafkaSupervisor implements Supervisor log.info("Creating new task group [%d] for partitions %s", groupId, partitionGroups.get(groupId).keySet()); Optional minimumMessageTime = (ioConfig.getLateMessageRejectionPeriod().isPresent() ? Optional.of( - DateTime.now().minus(ioConfig.getLateMessageRejectionPeriod().get()) + DateTimes.nowUtc().minus(ioConfig.getLateMessageRejectionPeriod().get()) ) : Optional.absent()); Optional maximumMessageTime = (ioConfig.getEarlyMessageRejectionPeriod().isPresent() ? 
Optional.of( - DateTime.now().plus(ioConfig.getEarlyMessageRejectionPeriod().get()) + DateTimes.nowUtc().plus(ioConfig.getEarlyMessageRejectionPeriod().get()) ) : Optional.absent()); taskGroups.put(groupId, new TaskGroup(generateStartingOffsetsForPartitionGroup(groupId), minimumMessageTime, maximumMessageTime)); @@ -1626,7 +1627,7 @@ public class KafkaSupervisor implements Supervisor Map partitionLag = getLagPerPartition(getHighestCurrentOffsets()); KafkaSupervisorReport report = new KafkaSupervisorReport( dataSource, - DateTime.now(), + DateTimes.nowUtc(), ioConfig.getTopic(), numPartitions, ioConfig.getReplicas(), @@ -1648,7 +1649,7 @@ public class KafkaSupervisor implements Supervisor Long remainingSeconds = null; if (startTime != null) { remainingSeconds = Math.max( - 0, ioConfig.getTaskDuration().getMillis() - (DateTime.now().getMillis() - startTime.getMillis()) + 0, ioConfig.getTaskDuration().getMillis() - (System.currentTimeMillis() - startTime.getMillis()) ) / 1000; } @@ -1674,7 +1675,7 @@ public class KafkaSupervisor implements Supervisor Map currentOffsets = entry.getValue().currentOffsets; Long remainingSeconds = null; if (taskGroup.completionTimeout != null) { - remainingSeconds = Math.max(0, taskGroup.completionTimeout.getMillis() - DateTime.now().getMillis()) + remainingSeconds = Math.max(0, taskGroup.completionTimeout.getMillis() - System.currentTimeMillis()) / 1000; } @@ -1822,7 +1823,7 @@ public class KafkaSupervisor implements Supervisor try { updateCurrentOffsets(); updateLatestOffsetsFromKafka(); - offsetsLastUpdated = DateTime.now(); + offsetsLastUpdated = DateTimes.nowUtc(); } catch (Exception e) { log.warn(e, "Exception while getting current/latest offsets"); diff --git a/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIOConfigTest.java b/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIOConfigTest.java index c1c75754787..49a9b90033d 100644 --- a/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIOConfigTest.java +++ b/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIOConfigTest.java @@ -24,9 +24,9 @@ import com.fasterxml.jackson.databind.Module; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableMap; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; import io.druid.segment.indexing.IOConfig; import org.hamcrest.CoreMatchers; -import org.joda.time.DateTime; import org.junit.Assert; import org.junit.Rule; import org.junit.Test; @@ -111,8 +111,8 @@ public class KafkaIOConfigTest Assert.assertEquals(ImmutableMap.of("bootstrap.servers", "localhost:9092"), config.getConsumerProperties()); Assert.assertEquals(false, config.isUseTransaction()); Assert.assertEquals(true, config.isPauseAfterRead()); - Assert.assertEquals(new DateTime("2016-05-31T12:00Z"), config.getMinimumMessageTime().get()); - Assert.assertEquals(new DateTime("2016-05-31T14:00Z"), config.getMaximumMessageTime().get()); + Assert.assertEquals(DateTimes.of("2016-05-31T12:00Z"), config.getMinimumMessageTime().get()); + Assert.assertEquals(DateTimes.of("2016-05-31T14:00Z"), config.getMaximumMessageTime().get()); Assert.assertTrue("skipOffsetGaps", config.isSkipOffsetGaps()); } diff --git a/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIndexTaskClientTest.java 
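// The KafkaSupervisor hunks above replace DateTime.now().getMillis() with
// System.currentTimeMillis() where only elapsed milliseconds are needed, so no
// DateTime (and no chronology) is constructed at all. Sketch mirroring the
// remaining-seconds calculation in those hunks:
import org.joda.time.DateTime;

class RemainingSecondsSketch
{
  static Long remainingSeconds(long taskDurationMillis, DateTime startTime)
  {
    if (startTime == null) {
      return null;
    }
    return Math.max(0, taskDurationMillis - (System.currentTimeMillis() - startTime.getMillis())) / 1000;
  }
}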
b/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIndexTaskClientTest.java index e27e29e4339..70084eb8983 100644 --- a/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIndexTaskClientTest.java +++ b/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIndexTaskClientTest.java @@ -35,6 +35,7 @@ import io.druid.indexing.common.TaskInfoProvider; import io.druid.indexing.common.TaskLocation; import io.druid.indexing.common.TaskStatus; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.IAE; import io.druid.java.util.common.StringUtils; import org.easymock.Capture; @@ -346,7 +347,7 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport public void testGetStartTime() throws Exception { client = new TestableKafkaIndexTaskClient(httpClient, objectMapper, taskInfoProvider, 2); - DateTime now = DateTime.now(); + DateTime now = DateTimes.nowUtc(); Capture captured = Capture.newInstance(); expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.NOT_FOUND).times(3) @@ -789,7 +790,7 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport @Test public void testGetStartTimeAsync() throws Exception { - final DateTime now = DateTime.now(); + final DateTime now = DateTimes.nowUtc(); final int numRequests = TEST_IDS.size(); Capture captured = Capture.newInstance(CaptureType.ALL); expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes(); diff --git a/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIndexTaskTest.java b/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIndexTaskTest.java index cbcaca7ff81..ec02dc372ca 100644 --- a/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIndexTaskTest.java +++ b/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIndexTaskTest.java @@ -78,7 +78,9 @@ import io.druid.indexing.test.TestDataSegmentAnnouncer; import io.druid.indexing.test.TestDataSegmentKiller; import io.druid.jackson.DefaultObjectMapper; import io.druid.java.util.common.CompressionUtils; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Sequences; @@ -128,7 +130,6 @@ import org.apache.curator.test.TestingCluster; import org.apache.kafka.clients.producer.KafkaProducer; import org.apache.kafka.clients.producer.ProducerRecord; import org.easymock.EasyMock; -import org.joda.time.DateTime; import org.joda.time.Interval; import org.joda.time.Period; import org.junit.After; @@ -420,7 +421,7 @@ public class KafkaIndexTaskTest kafkaServer.consumerProperties(), true, false, - new DateTime("2010"), + DateTimes.of("2010"), null, false ), @@ -477,7 +478,7 @@ public class KafkaIndexTaskTest true, false, null, - new DateTime("2010"), + DateTimes.of("2010"), false ), null, @@ -1663,7 +1664,7 @@ public class KafkaIndexTaskTest return FluentIterable.from( metadataStorageCoordinator.getUsedSegmentsForInterval( DATA_SCHEMA.getDataSource(), - new Interval("0000/3000") + Intervals.of("0000/3000") ) ).transform( new Function() @@ -1759,7 +1760,7 @@ public class KafkaIndexTaskTest private SegmentDescriptor SD(final Task task, final String intervalString, final int 
partitionNum) { - final Interval interval = new Interval(intervalString); + final Interval interval = Intervals.of(intervalString); return new SegmentDescriptor(interval, getLock(task, interval).getVersion(), partitionNum); } } diff --git a/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/supervisor/KafkaSupervisorTest.java b/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/supervisor/KafkaSupervisorTest.java index b57814ca9c5..9ed12b7cbff 100644 --- a/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/supervisor/KafkaSupervisorTest.java +++ b/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/supervisor/KafkaSupervisorTest.java @@ -26,7 +26,6 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.ListenableFuture; -import io.druid.java.util.common.StringUtils; import io.druid.data.input.impl.DimensionSchema; import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.JSONParseSpec; @@ -57,7 +56,9 @@ import io.druid.indexing.overlord.TaskRunnerListener; import io.druid.indexing.overlord.TaskRunnerWorkItem; import io.druid.indexing.overlord.TaskStorage; import io.druid.indexing.overlord.supervisor.SupervisorReport; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.Granularities; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.CountAggregatorFactory; @@ -607,7 +608,7 @@ public class KafkaSupervisorTest extends EasyMockSupport expect(taskStorage.getTask("id3")).andReturn(Optional.of(id3)).anyTimes(); expect(taskClient.getStatusAsync(anyString())).andReturn(Futures.immediateFuture(KafkaIndexTask.Status.NOT_STARTED)) .anyTimes(); - expect(taskClient.getStartTimeAsync(anyString())).andReturn(Futures.immediateFuture(DateTime.now())).anyTimes(); + expect(taskClient.getStartTimeAsync(anyString())).andReturn(Futures.immediateFuture(DateTimes.nowUtc())).anyTimes(); expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( new KafkaDataSourceMetadata( null @@ -696,7 +697,7 @@ public class KafkaSupervisorTest extends EasyMockSupport expect(taskStorage.getTask("id5")).andReturn(Optional.of(id3)).anyTimes(); expect(taskClient.getStatusAsync(anyString())).andReturn(Futures.immediateFuture(KafkaIndexTask.Status.NOT_STARTED)) .anyTimes(); - expect(taskClient.getStartTimeAsync(anyString())).andReturn(Futures.immediateFuture(DateTime.now())).anyTimes(); + expect(taskClient.getStartTimeAsync(anyString())).andReturn(Futures.immediateFuture(DateTimes.nowUtc())).anyTimes(); expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( new KafkaDataSourceMetadata( null @@ -728,7 +729,7 @@ public class KafkaSupervisorTest extends EasyMockSupport expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); expect(taskClient.getStatusAsync(anyString())).andReturn(Futures.immediateFuture(KafkaIndexTask.Status.NOT_STARTED)) .anyTimes(); - expect(taskClient.getStartTimeAsync(anyString())).andReturn(Futures.immediateFuture(DateTime.now())).anyTimes(); + expect(taskClient.getStartTimeAsync(anyString())).andReturn(Futures.immediateFuture(DateTimes.nowUtc())).anyTimes(); 
expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( new KafkaDataSourceMetadata( null @@ -790,8 +791,8 @@ public class KafkaSupervisorTest extends EasyMockSupport supervisor = getSupervisor(2, 1, true, "PT1H", null, null, false); addSomeEvents(1); - DateTime now = DateTime.now(); - DateTime maxi = DateTime.now().plusMinutes(60); + DateTime now = DateTimes.nowUtc(); + DateTime maxi = now.plusMinutes(60); Task id1 = createKafkaIndexTask( "id1", DATASOURCE, @@ -876,7 +877,7 @@ public class KafkaSupervisorTest extends EasyMockSupport expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); expect(taskClient.getStatusAsync(anyString())).andReturn(Futures.immediateFuture(KafkaIndexTask.Status.NOT_STARTED)) .anyTimes(); - expect(taskClient.getStartTimeAsync(anyString())).andReturn(Futures.immediateFuture(DateTime.now())).anyTimes(); + expect(taskClient.getStartTimeAsync(anyString())).andReturn(Futures.immediateFuture(DateTimes.nowUtc())).anyTimes(); expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( new KafkaDataSourceMetadata( null @@ -976,10 +977,10 @@ public class KafkaSupervisorTest extends EasyMockSupport .andReturn(Futures.immediateFuture(KafkaIndexTask.Status.READING)) .anyTimes(); expect(taskClient.getStartTimeAsync(EasyMock.contains("sequenceName-0"))) - .andReturn(Futures.immediateFuture(DateTime.now().minusMinutes(2))) - .andReturn(Futures.immediateFuture(DateTime.now())); + .andReturn(Futures.immediateFuture(DateTimes.nowUtc().minusMinutes(2))) + .andReturn(Futures.immediateFuture(DateTimes.nowUtc())); expect(taskClient.getStartTimeAsync(EasyMock.contains("sequenceName-1"))) - .andReturn(Futures.immediateFuture(DateTime.now())) + .andReturn(Futures.immediateFuture(DateTimes.nowUtc())) .times(2); expect(taskClient.pauseAsync(EasyMock.contains("sequenceName-0"))) .andReturn(Futures.immediateFuture((Map) ImmutableMap.of(0, 10L, 1, 20L, 2, 30L))) @@ -1202,7 +1203,7 @@ public class KafkaSupervisorTest extends EasyMockSupport { final TaskLocation location1 = new TaskLocation("testHost", 1234, -1); final TaskLocation location2 = new TaskLocation("testHost2", 145, -1); - final DateTime startTime = new DateTime(); + final DateTime startTime = DateTimes.nowUtc(); supervisor = getSupervisor(1, 1, true, "PT1H", null, null, false); addSomeEvents(6); @@ -1383,10 +1384,10 @@ public class KafkaSupervisorTest extends EasyMockSupport .andReturn(Futures.immediateFuture(KafkaIndexTask.Status.READING)) .anyTimes(); expect(taskClient.getStartTimeAsync(EasyMock.contains("sequenceName-0"))) - .andReturn(Futures.immediateFuture(DateTime.now().minusMinutes(2))) - .andReturn(Futures.immediateFuture(DateTime.now())); + .andReturn(Futures.immediateFuture(DateTimes.nowUtc().minusMinutes(2))) + .andReturn(Futures.immediateFuture(DateTimes.nowUtc())); expect(taskClient.getStartTimeAsync(EasyMock.contains("sequenceName-1"))) - .andReturn(Futures.immediateFuture(DateTime.now())) + .andReturn(Futures.immediateFuture(DateTimes.nowUtc())) .times(2); expect(taskClient.pauseAsync(EasyMock.contains("sequenceName-0"))) .andReturn(Futures.>immediateFailedFuture(new RuntimeException())).times(2); @@ -1450,10 +1451,10 @@ public class KafkaSupervisorTest extends EasyMockSupport .andReturn(Futures.immediateFuture(KafkaIndexTask.Status.READING)) .anyTimes(); expect(taskClient.getStartTimeAsync(EasyMock.contains("sequenceName-0"))) - .andReturn(Futures.immediateFuture(DateTime.now().minusMinutes(2))) - 
.andReturn(Futures.immediateFuture(DateTime.now())); + .andReturn(Futures.immediateFuture(DateTimes.nowUtc().minusMinutes(2))) + .andReturn(Futures.immediateFuture(DateTimes.nowUtc())); expect(taskClient.getStartTimeAsync(EasyMock.contains("sequenceName-1"))) - .andReturn(Futures.immediateFuture(DateTime.now())) + .andReturn(Futures.immediateFuture(DateTimes.nowUtc())) .times(2); expect(taskClient.pauseAsync(EasyMock.contains("sequenceName-0"))) .andReturn(Futures.immediateFuture((Map) ImmutableMap.of(0, 10L, 1, 20L, 2, 30L))) @@ -1508,7 +1509,7 @@ public class KafkaSupervisorTest extends EasyMockSupport { final TaskLocation location1 = new TaskLocation("testHost", 1234, -1); final TaskLocation location2 = new TaskLocation("testHost2", 145, -1); - final DateTime startTime = new DateTime(); + final DateTime startTime = DateTimes.nowUtc(); supervisor = getSupervisor(2, 1, true, "PT1H", null, null, false); addSomeEvents(1); @@ -1697,7 +1698,7 @@ public class KafkaSupervisorTest extends EasyMockSupport { final TaskLocation location1 = new TaskLocation("testHost", 1234, -1); final TaskLocation location2 = new TaskLocation("testHost2", 145, -1); - final DateTime startTime = new DateTime(); + final DateTime startTime = DateTimes.nowUtc(); supervisor = getSupervisor(2, 1, true, "PT1H", null, null, false); addSomeEvents(1); diff --git a/extensions-core/lookups-cached-global/src/main/java/io/druid/server/lookup/namespace/JdbcCacheGenerator.java b/extensions-core/lookups-cached-global/src/main/java/io/druid/server/lookup/namespace/JdbcCacheGenerator.java index aa274985474..5a7f5755326 100644 --- a/extensions-core/lookups-cached-global/src/main/java/io/druid/server/lookup/namespace/JdbcCacheGenerator.java +++ b/extensions-core/lookups-cached-global/src/main/java/io/druid/server/lookup/namespace/JdbcCacheGenerator.java @@ -19,7 +19,7 @@ package io.druid.server.lookup.namespace; -import io.druid.common.utils.JodaUtils; +import io.druid.java.util.common.JodaUtils; import io.druid.java.util.common.Pair; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.logger.Logger; diff --git a/extensions-core/protobuf-extensions/src/test/java/io/druid/data/input/protobuf/ProtobufInputRowParserTest.java b/extensions-core/protobuf-extensions/src/test/java/io/druid/data/input/protobuf/ProtobufInputRowParserTest.java index 19a95087ca1..863c0619bc9 100644 --- a/extensions-core/protobuf-extensions/src/test/java/io/druid/data/input/protobuf/ProtobufInputRowParserTest.java +++ b/extensions-core/protobuf-extensions/src/test/java/io/druid/data/input/protobuf/ProtobufInputRowParserTest.java @@ -32,6 +32,7 @@ import io.druid.data.input.impl.StringDimensionSchema; import io.druid.data.input.impl.TimestampSpec; import io.druid.java.util.common.parsers.ParseException; import org.joda.time.DateTime; +import org.joda.time.chrono.ISOChronology; import org.junit.Before; import org.junit.Test; @@ -116,7 +117,7 @@ public class ProtobufInputRowParserTest ProtobufInputRowParser parser = new ProtobufInputRowParser(parseSpec, "prototest.desc", "ProtoTestEvent"); //create binary of proto test event - DateTime dateTime = new DateTime(2012, 07, 12, 9, 30); + DateTime dateTime = new DateTime(2012, 07, 12, 9, 30, ISOChronology.getInstanceUTC()); ProtoTestEventWrapper.ProtoTestEvent event = ProtoTestEventWrapper.ProtoTestEvent.newBuilder() .setDescription("description") .setEventType(ProtoTestEventWrapper.ProtoTestEvent.EventCategory.CATEGORY_ONE) diff --git 
a/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentArchiverTest.java b/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentArchiverTest.java index d8b1450bf96..75a4d2f0f13 100644 --- a/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentArchiverTest.java +++ b/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentArchiverTest.java @@ -27,11 +27,11 @@ import com.fasterxml.jackson.databind.module.SimpleModule; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Intervals; import io.druid.segment.loading.SegmentLoadingException; import io.druid.timeline.DataSegment; import org.easymock.EasyMock; import org.jets3t.service.impl.rest.httpclient.RestS3Service; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.BeforeClass; import org.junit.Test; @@ -73,7 +73,7 @@ public class S3DataSegmentArchiverTest .binaryVersion(1) .dataSource("dataSource") .dimensions(ImmutableList.of()) - .interval(Interval.parse("2015/2016")) + .interval(Intervals.of("2015/2016")) .version("version") .loadSpec(ImmutableMap.of( "type", diff --git a/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentFinderTest.java b/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentFinderTest.java index 00e01700089..5d0e4bd4b00 100644 --- a/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentFinderTest.java +++ b/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentFinderTest.java @@ -31,6 +31,7 @@ import com.google.common.collect.Maps; import com.google.common.collect.Ordering; import com.google.common.collect.Sets; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Intervals; import io.druid.segment.loading.SegmentLoadingException; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NumberedShardSpec; @@ -42,7 +43,6 @@ import org.jets3t.service.StorageObjectsChunk; import org.jets3t.service.impl.rest.httpclient.RestS3Service; import org.jets3t.service.model.S3Object; import org.jets3t.service.model.StorageObject; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Before; import org.junit.BeforeClass; @@ -62,62 +62,47 @@ public class S3DataSegmentFinderTest { private static final ObjectMapper mapper = new DefaultObjectMapper(); - private static final DataSegment SEGMENT_1 = DataSegment.builder() - .dataSource("wikipedia") - .interval( - new Interval( - "2013-08-31T00:00:00.000Z/2013-09-01T00:00:00.000Z" - ) - ) - .version("2015-10-21T22:07:57.074Z") - .loadSpec( - ImmutableMap.of( - "type", - "s3_zip", - "bucket", - "bucket1", - "key", - "abc/somewhere/index.zip" - ) - ) - .dimensions(ImmutableList.of("language", "page")) - .metrics(ImmutableList.of("count")) - .build(); + private static final DataSegment SEGMENT_1 = DataSegment + .builder() + .dataSource("wikipedia") + .interval(Intervals.of("2013-08-31T00:00:00.000Z/2013-09-01T00:00:00.000Z")) + .version("2015-10-21T22:07:57.074Z") + .loadSpec( + ImmutableMap.of( + "type", + "s3_zip", + "bucket", + "bucket1", + "key", + "abc/somewhere/index.zip" + ) + ) + .dimensions(ImmutableList.of("language", "page")) + .metrics(ImmutableList.of("count")) + .build(); - private static final DataSegment SEGMENT_2 = DataSegment.builder(SEGMENT_1) - .interval( - new Interval( - 
"2013-09-01T00:00:00.000Z/2013-09-02T00:00:00.000Z" - ) - ) - .build(); + private static final DataSegment SEGMENT_2 = DataSegment + .builder(SEGMENT_1) + .interval(Intervals.of("2013-09-01T00:00:00.000Z/2013-09-02T00:00:00.000Z")) + .build(); - private static final DataSegment SEGMENT_3 = DataSegment.builder(SEGMENT_1) - .interval( - new Interval( - "2013-09-02T00:00:00.000Z/2013-09-03T00:00:00.000Z" - ) - ) - .version("2015-10-22T22:07:57.074Z") - .build(); + private static final DataSegment SEGMENT_3 = DataSegment + .builder(SEGMENT_1) + .interval(Intervals.of("2013-09-02T00:00:00.000Z/2013-09-03T00:00:00.000Z")) + .version("2015-10-22T22:07:57.074Z") + .build(); - private static final DataSegment SEGMENT_4_0 = DataSegment.builder(SEGMENT_1) - .interval( - new Interval( - "2013-09-02T00:00:00.000Z/2013-09-03T00:00:00.000Z" - ) - ) - .shardSpec(new NumberedShardSpec(0, 2)) - .build(); + private static final DataSegment SEGMENT_4_0 = DataSegment + .builder(SEGMENT_1) + .interval(Intervals.of("2013-09-02T00:00:00.000Z/2013-09-03T00:00:00.000Z")) + .shardSpec(new NumberedShardSpec(0, 2)) + .build(); - private static final DataSegment SEGMENT_4_1 = DataSegment.builder(SEGMENT_1) - .interval( - new Interval( - "2013-09-02T00:00:00.000Z/2013-09-03T00:00:00.000Z" - ) - ) - .shardSpec(new NumberedShardSpec(1, 2)) - .build(); + private static final DataSegment SEGMENT_4_1 = DataSegment + .builder(SEGMENT_1) + .interval(Intervals.of("2013-09-02T00:00:00.000Z/2013-09-03T00:00:00.000Z")) + .shardSpec(new NumberedShardSpec(1, 2)) + .build(); @Rule public final TemporaryFolder temporaryFolder = new TemporaryFolder(); diff --git a/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentMoverTest.java b/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentMoverTest.java index 4b9653219c8..fbd676f700b 100644 --- a/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentMoverTest.java +++ b/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentMoverTest.java @@ -23,7 +23,7 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Maps; import com.google.common.collect.Sets; - +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.MapUtils; import io.druid.segment.loading.SegmentLoadingException; import io.druid.timeline.DataSegment; @@ -33,7 +33,6 @@ import org.jets3t.service.ServiceException; import org.jets3t.service.impl.rest.httpclient.RestS3Service; import org.jets3t.service.model.S3Object; import org.jets3t.service.model.StorageObject; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; @@ -44,7 +43,7 @@ public class S3DataSegmentMoverTest { private static final DataSegment sourceSegment = new DataSegment( "test", - new Interval("2013-01-01/2013-01-02"), + Intervals.of("2013-01-01/2013-01-02"), "1", ImmutableMap.of( "key", @@ -119,7 +118,7 @@ public class S3DataSegmentMoverTest S3DataSegmentMover mover = new S3DataSegmentMover(mockS3Client, new S3DataSegmentPusherConfig()); mover.move(new DataSegment( "test", - new Interval("2013-01-01/2013-01-02"), + Intervals.of("2013-01-01/2013-01-02"), "1", ImmutableMap.of( "key", @@ -142,7 +141,7 @@ public class S3DataSegmentMoverTest S3DataSegmentMover mover = new S3DataSegmentMover(mockS3Client, new S3DataSegmentPusherConfig()); mover.move(new DataSegment( "test", - new Interval("2013-01-01/2013-01-02"), + Intervals.of("2013-01-01/2013-01-02"), "1", 
ImmutableMap.of( "key", diff --git a/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentPusherTest.java b/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentPusherTest.java index a8bcbde4e8c..32818b17e48 100644 --- a/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentPusherTest.java +++ b/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentPusherTest.java @@ -24,6 +24,7 @@ import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.io.Files; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Intervals; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; import org.apache.commons.io.IOUtils; @@ -32,7 +33,6 @@ import org.easymock.EasyMock; import org.easymock.IAnswer; import org.jets3t.service.impl.rest.httpclient.RestS3Service; import org.jets3t.service.model.S3Object; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Rule; import org.junit.Test; @@ -103,7 +103,7 @@ public class S3DataSegmentPusherTest DataSegment segmentToPush = new DataSegment( "foo", - new Interval("2015/2016"), + Intervals.of("2015/2016"), "0", Maps.newHashMap(), Lists.newArrayList(), diff --git a/extensions-core/stats/src/test/java/io/druid/query/aggregation/variance/VarianceTestHelper.java b/extensions-core/stats/src/test/java/io/druid/query/aggregation/variance/VarianceTestHelper.java index 3799d03d06d..48c75c4fda3 100644 --- a/extensions-core/stats/src/test/java/io/druid/query/aggregation/variance/VarianceTestHelper.java +++ b/extensions-core/stats/src/test/java/io/druid/query/aggregation/variance/VarianceTestHelper.java @@ -24,6 +24,7 @@ import com.google.common.collect.Lists; import com.google.common.collect.Maps; import io.druid.data.input.MapBasedRow; import io.druid.data.input.Row; +import io.druid.java.util.common.DateTimes; import io.druid.query.QueryRunnerTestHelper; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.PostAggregator; @@ -99,7 +100,7 @@ public class VarianceTestHelper extends QueryRunnerTestHelper for (int i = 0; i < values.length; i++) { theVals.put(names[i], values[i]); } - DateTime ts = new DateTime(timestamp); + DateTime ts = DateTimes.of(timestamp); return new MapBasedRow(ts, theVals); } } diff --git a/extensions-core/stats/src/test/java/io/druid/query/aggregation/variance/VarianceTimeseriesQueryTest.java b/extensions-core/stats/src/test/java/io/druid/query/aggregation/variance/VarianceTimeseriesQueryTest.java index 707e1364ce4..aeafee083ba 100644 --- a/extensions-core/stats/src/test/java/io/druid/query/aggregation/variance/VarianceTimeseriesQueryTest.java +++ b/extensions-core/stats/src/test/java/io/druid/query/aggregation/variance/VarianceTimeseriesQueryTest.java @@ -20,6 +20,7 @@ package io.druid.query.aggregation.variance; import com.google.common.collect.Lists; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.guava.Sequences; import io.druid.query.Druids; import io.druid.query.QueryPlus; @@ -31,7 +32,6 @@ import io.druid.query.timeseries.TimeseriesQuery; import io.druid.query.timeseries.TimeseriesQueryRunnerTest; import io.druid.query.timeseries.TimeseriesResultValue; import io.druid.segment.TestHelper; -import org.joda.time.DateTime; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; @@ -79,7 +79,7 @@ public class VarianceTimeseriesQueryTest 
List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01"), + DateTimes.of("2011-04-01"), new TimeseriesResultValue( VarianceTestHelper.of( "rows", 13L, @@ -92,7 +92,7 @@ public class VarianceTimeseriesQueryTest ) ), new Result<>( - new DateTime("2011-04-02"), + DateTimes.of("2011-04-02"), new TimeseriesResultValue( VarianceTestHelper.of( "rows", 13L, diff --git a/extensions-core/stats/src/test/java/io/druid/query/aggregation/variance/VarianceTopNQueryTest.java b/extensions-core/stats/src/test/java/io/druid/query/aggregation/variance/VarianceTopNQueryTest.java index 170592aac50..415e0aa3022 100644 --- a/extensions-core/stats/src/test/java/io/druid/query/aggregation/variance/VarianceTopNQueryTest.java +++ b/extensions-core/stats/src/test/java/io/druid/query/aggregation/variance/VarianceTopNQueryTest.java @@ -22,6 +22,7 @@ package io.druid.query.aggregation.variance; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.guava.Sequence; import io.druid.query.QueryPlus; import io.druid.query.QueryRunner; @@ -38,7 +39,6 @@ import io.druid.query.topn.TopNQueryQueryToolChest; import io.druid.query.topn.TopNQueryRunnerTest; import io.druid.query.topn.TopNResultValue; import io.druid.segment.TestHelper; -import org.joda.time.DateTime; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; @@ -92,7 +92,7 @@ public class VarianceTopNQueryTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() diff --git a/hll/src/test/java/io/druid/hll/HyperLogLogCollectorTest.java b/hll/src/test/java/io/druid/hll/HyperLogLogCollectorTest.java index 853fc98542e..2db3c2cad46 100644 --- a/hll/src/test/java/io/druid/hll/HyperLogLogCollectorTest.java +++ b/hll/src/test/java/io/druid/hll/HyperLogLogCollectorTest.java @@ -25,6 +25,7 @@ import com.google.common.collect.Lists; import com.google.common.hash.HashFunction; import com.google.common.hash.Hashing; import io.druid.java.util.common.StringUtils; +import io.druid.java.util.common.logger.Logger; import org.apache.commons.codec.binary.Base64; import org.junit.Assert; import org.junit.Ignore; @@ -36,13 +37,13 @@ import java.security.MessageDigest; import java.util.Arrays; import java.util.Collection; import java.util.List; -import java.util.Locale; import java.util.Random; /** */ public class HyperLogLogCollectorTest { + private static final Logger log = new Logger(HyperLogLogCollectorTest.class); private final HashFunction fn = Hashing.murmur3_128(); @@ -118,15 +119,10 @@ public class HyperLogLogCollectorTest int n = count; - System.out.println("True cardinality " + n); - System.out.println("Rolling buffer cardinality " + rolling.estimateCardinality()); - System.out.println("Simple buffer cardinality " + simple.estimateCardinality()); - System.out.println( - StringUtils.format( - "Rolling cardinality estimate off by %4.1f%%", - 100 * (1 - rolling.estimateCardinality() / n) - ) - ); + log.info("True cardinality " + n); + log.info("Rolling buffer cardinality " + rolling.estimateCardinality()); + log.info("Simple buffer cardinality " + simple.estimateCardinality()); + log.info("Rolling cardinality estimate off by %4.1f%%", 100 * (1 - rolling.estimateCardinality() / n)); Assert.assertEquals(n, 
simple.estimateCardinality(), n * 0.05); Assert.assertEquals(n, rolling.estimateCardinality(), n * 0.05); @@ -145,22 +141,13 @@ public class HyperLogLogCollectorTest theCollector.add(fn.hashLong(count).asBytes()); rolling.fold(theCollector); } - System.out.printf( - Locale.ENGLISH, - "testHighCardinalityRollingFold2 took %d ms%n", - System.currentTimeMillis() - start - ); + log.info("testHighCardinalityRollingFold2 took %d ms", System.currentTimeMillis() - start); int n = count; - System.out.println("True cardinality " + n); - System.out.println("Rolling buffer cardinality " + rolling.estimateCardinality()); - System.out.println( - StringUtils.format( - "Rolling cardinality estimate off by %4.1f%%", - 100 * (1 - rolling.estimateCardinality() / n) - ) - ); + log.info("True cardinality " + n); + log.info("Rolling buffer cardinality " + rolling.estimateCardinality()); + log.info("Rolling cardinality estimate off by %4.1f%%", 100 * (1 - rolling.estimateCardinality() / n)); Assert.assertEquals(n, rolling.estimateCardinality(), n * 0.05); } @@ -843,9 +830,8 @@ public class HyperLogLogCollectorTest error += errorThisTime; - System.out.printf( - Locale.ENGLISH, - "%,d ==? %,f in %,d millis. actual error[%,f%%], avg. error [%,f%%]%n", + log.info( + "%,d ==? %,f in %,d millis. actual error[%,f%%], avg. error [%,f%%]", numThings, estimatedValue, System.currentTimeMillis() - startTime, diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/Bucket.java b/indexing-hadoop/src/main/java/io/druid/indexer/Bucket.java index 4270e008fc6..93df764780d 100644 --- a/indexing-hadoop/src/main/java/io/druid/indexer/Bucket.java +++ b/indexing-hadoop/src/main/java/io/druid/indexer/Bucket.java @@ -20,9 +20,8 @@ package io.druid.indexer; import com.google.common.annotations.VisibleForTesting; - +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.Pair; - import org.joda.time.DateTime; import java.nio.ByteBuffer; @@ -118,7 +117,7 @@ public class Bucket { ByteBuffer buf = ByteBuffer.wrap(keyBytes); - Bucket bucket = new Bucket(buf.getInt(), new DateTime(buf.getLong()), buf.getInt()); + Bucket bucket = new Bucket(buf.getInt(), DateTimes.utc(buf.getLong()), buf.getInt()); byte[] bytesLeft = new byte[buf.remaining()]; buf.get(bytesLeft); diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/DetermineHashedPartitionsJob.java b/indexing-hadoop/src/main/java/io/druid/indexer/DetermineHashedPartitionsJob.java index c97b7d75c18..7d0e2a9cb46 100644 --- a/indexing-hadoop/src/main/java/io/druid/indexer/DetermineHashedPartitionsJob.java +++ b/indexing-hadoop/src/main/java/io/druid/indexer/DetermineHashedPartitionsJob.java @@ -31,6 +31,7 @@ import com.google.common.io.Closeables; import io.druid.data.input.InputRow; import io.druid.data.input.Rows; import io.druid.hll.HyperLogLogCollector; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.ISE; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.Granularity; @@ -257,14 +258,14 @@ public class DetermineHashedPartitionsJob implements Jobby if (determineIntervals) { interval = config.getGranularitySpec() .getSegmentGranularity() - .bucket(new DateTime(inputRow.getTimestampFromEpoch())); + .bucket(DateTimes.utc(inputRow.getTimestampFromEpoch())); if (!hyperLogLogs.containsKey(interval)) { hyperLogLogs.put(interval, HyperLogLogCollector.makeLatestCollector()); } } else { final Optional maybeInterval = config.getGranularitySpec() - .bucketInterval(new 
DateTime(inputRow.getTimestampFromEpoch())); + .bucketInterval(DateTimes.utc(inputRow.getTimestampFromEpoch())); if (!maybeInterval.isPresent()) { throw new ISE("WTF?! No bucket found for timestamp: %s", inputRow.getTimestampFromEpoch()); @@ -324,7 +325,7 @@ public class DetermineHashedPartitionsJob implements Jobby HyperLogLogCollector.makeCollector(ByteBuffer.wrap(value.getBytes(), 0, value.getLength())) ); } - Optional intervalOptional = config.getGranularitySpec().bucketInterval(new DateTime(key.get())); + Optional intervalOptional = config.getGranularitySpec().bucketInterval(DateTimes.utc(key.get())); if (!intervalOptional.isPresent()) { throw new ISE("WTF?! No bucket found for timestamp: %s", key.get()); diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/DeterminePartitionsJob.java b/indexing-hadoop/src/main/java/io/druid/indexer/DeterminePartitionsJob.java index a1182969f33..a1848dba554 100644 --- a/indexing-hadoop/src/main/java/io/druid/indexer/DeterminePartitionsJob.java +++ b/indexing-hadoop/src/main/java/io/druid/indexer/DeterminePartitionsJob.java @@ -37,6 +37,7 @@ import io.druid.collections.CombiningIterable; import io.druid.data.input.InputRow; import io.druid.data.input.Rows; import io.druid.indexer.partitions.SingleDimensionPartitionsSpec; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.ISE; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.Granularity; @@ -67,6 +68,7 @@ import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat; import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat; import org.joda.time.DateTime; import org.joda.time.Interval; +import org.joda.time.chrono.ISOChronology; import java.io.IOException; import java.io.OutputStream; @@ -322,7 +324,7 @@ public class DeterminePartitionsJob implements Jobby { final List timeAndDims = HadoopDruidIndexerConfig.JSON_MAPPER.readValue(key.getBytes(), List.class); - final DateTime timestamp = new DateTime(timeAndDims.get(0)); + final DateTime timestamp = new DateTime(timeAndDims.get(0), ISOChronology.getInstanceUTC()); final Map> dims = (Map>) timeAndDims.get(1); helper.emitDimValueCounts(context, timestamp, dims); @@ -359,7 +361,7 @@ public class DeterminePartitionsJob implements Jobby for (final String dim : inputRow.getDimensions()) { dims.put(dim, inputRow.getDimension(dim)); } - helper.emitDimValueCounts(context, new DateTime(inputRow.getTimestampFromEpoch()), dims); + helper.emitDimValueCounts(context, DateTimes.utc(inputRow.getTimestampFromEpoch()), dims); } } @@ -566,7 +568,7 @@ public class DeterminePartitionsJob implements Jobby { final ByteBuffer groupKey = ByteBuffer.wrap(keyBytes.getGroupKey()); groupKey.position(4); // Skip partition - final DateTime bucket = new DateTime(groupKey.getLong()); + final DateTime bucket = DateTimes.utc(groupKey.getLong()); final PeekingIterator iterator = Iterators.peekingIterator(combinedIterable.iterator()); log.info( diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/HadoopDruidIndexerConfig.java b/indexing-hadoop/src/main/java/io/druid/indexer/HadoopDruidIndexerConfig.java index 6b718528b92..0f53e8614e0 100644 --- a/indexing-hadoop/src/main/java/io/druid/indexer/HadoopDruidIndexerConfig.java +++ b/indexing-hadoop/src/main/java/io/druid/indexer/HadoopDruidIndexerConfig.java @@ -37,7 +37,6 @@ import com.google.inject.Binder; import com.google.inject.Injector; import com.google.inject.Key; import com.google.inject.Module; -import io.druid.common.utils.JodaUtils; import 
io.druid.data.input.InputRow; import io.druid.data.input.impl.InputRowParser; import io.druid.guice.GuiceInjectors; @@ -46,6 +45,8 @@ import io.druid.guice.annotations.Self; import io.druid.indexer.partitions.PartitionsSpec; import io.druid.indexer.path.PathSpec; import io.druid.initialization.Initialization; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.JodaUtils; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.Granularity; import io.druid.java.util.common.guava.FunctionalIterable; @@ -410,9 +411,7 @@ public class HadoopDruidIndexerConfig public Optional getBucket(InputRow inputRow) { final Optional timeBucket = schema.getDataSchema().getGranularitySpec().bucketInterval( - new DateTime( - inputRow.getTimestampFromEpoch() - ) + DateTimes.utc(inputRow.getTimestampFromEpoch()) ); if (!timeBucket.isPresent()) { return Optional.absent(); diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/HadoopDruidIndexerMapper.java b/indexing-hadoop/src/main/java/io/druid/indexer/HadoopDruidIndexerMapper.java index 5e20467dd0c..d50549c1ee9 100644 --- a/indexing-hadoop/src/main/java/io/druid/indexer/HadoopDruidIndexerMapper.java +++ b/indexing-hadoop/src/main/java/io/druid/indexer/HadoopDruidIndexerMapper.java @@ -22,13 +22,13 @@ package io.druid.indexer; import io.druid.data.input.InputRow; import io.druid.data.input.impl.InputRowParser; import io.druid.data.input.impl.StringInputRowParser; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.RE; import io.druid.java.util.common.logger.Logger; import io.druid.java.util.common.parsers.ParseException; import io.druid.segment.indexing.granularity.GranularitySpec; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapreduce.Mapper; -import org.joda.time.DateTime; import java.io.IOException; @@ -82,7 +82,7 @@ public abstract class HadoopDruidIndexerMapper extends Mapper< } if (!granularitySpec.bucketIntervals().isPresent() - || granularitySpec.bucketInterval(new DateTime(inputRow.getTimestampFromEpoch())) + || granularitySpec.bucketInterval(DateTimes.utc(inputRow.getTimestampFromEpoch())) .isPresent()) { innerMap(inputRow, value, context, reportParseExceptions); } diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/HadoopTuningConfig.java b/indexing-hadoop/src/main/java/io/druid/indexer/HadoopTuningConfig.java index cc25ac18b2d..aeb72c033f8 100644 --- a/indexing-hadoop/src/main/java/io/druid/indexer/HadoopTuningConfig.java +++ b/indexing-hadoop/src/main/java/io/druid/indexer/HadoopTuningConfig.java @@ -27,9 +27,9 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import io.druid.indexer.partitions.HashedPartitionsSpec; import io.druid.indexer.partitions.PartitionsSpec; +import io.druid.java.util.common.DateTimes; import io.druid.segment.IndexSpec; import io.druid.segment.indexing.TuningConfig; -import org.joda.time.DateTime; import java.util.List; import java.util.Map; @@ -50,7 +50,7 @@ public class HadoopTuningConfig implements TuningConfig { return new HadoopTuningConfig( null, - new DateTime().toString(), + DateTimes.nowUtc().toString(), DEFAULT_PARTITIONS_SPEC, DEFAULT_SHARD_SPECS, DEFAULT_INDEX_SPEC, @@ -115,7 +115,7 @@ public class HadoopTuningConfig implements TuningConfig ) { this.workingPath = workingPath; - this.version = version == null ? new DateTime().toString() : version; + this.version = version == null ? 
DateTimes.nowUtc().toString() : version; this.partitionsSpec = partitionsSpec == null ? DEFAULT_PARTITIONS_SPEC : partitionsSpec; this.shardSpecs = shardSpecs == null ? DEFAULT_SHARD_SPECS : shardSpecs; this.indexSpec = indexSpec == null ? DEFAULT_INDEX_SPEC : indexSpec; diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/JobHelper.java b/indexing-hadoop/src/main/java/io/druid/indexer/JobHelper.java index cd713880943..180d4379848 100644 --- a/indexing-hadoop/src/main/java/io/druid/indexer/JobHelper.java +++ b/indexing-hadoop/src/main/java/io/druid/indexer/JobHelper.java @@ -27,6 +27,7 @@ import com.google.common.io.ByteStreams; import com.google.common.io.Files; import com.google.common.io.OutputSupplier; import io.druid.indexer.updater.HadoopDruidConverterConfig; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.FileUtils; import io.druid.java.util.common.IAE; import io.druid.java.util.common.IOE; @@ -51,7 +52,6 @@ import org.apache.hadoop.mapreduce.TaskAttemptContext; import org.apache.hadoop.mapreduce.TaskAttemptID; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.util.Progressable; -import org.joda.time.DateTime; import java.io.BufferedOutputStream; import java.io.File; @@ -622,10 +622,10 @@ public class JobHelper log.info( "File[%s / %s / %sB] existed, but wasn't the same as [%s / %s / %sB]", finalIndexZipFile.getPath(), - new DateTime(finalIndexZipFile.getModificationTime()), + DateTimes.utc(finalIndexZipFile.getModificationTime()), finalIndexZipFile.getLen(), zipFile.getPath(), - new DateTime(zipFile.getModificationTime()), + DateTimes.utc(zipFile.getModificationTime()), zipFile.getLen() ); outputFS.delete(finalIndexZipFilePath, false); @@ -634,7 +634,7 @@ public class JobHelper log.info( "File[%s / %s / %sB] existed and will be kept", finalIndexZipFile.getPath(), - new DateTime(finalIndexZipFile.getModificationTime()), + DateTimes.utc(finalIndexZipFile.getModificationTime()), finalIndexZipFile.getLen() ); needRename = false; diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/hadoop/DatasourceIngestionSpec.java b/indexing-hadoop/src/main/java/io/druid/indexer/hadoop/DatasourceIngestionSpec.java index 63d10450c2c..87e671f5878 100644 --- a/indexing-hadoop/src/main/java/io/druid/indexer/hadoop/DatasourceIngestionSpec.java +++ b/indexing-hadoop/src/main/java/io/druid/indexer/hadoop/DatasourceIngestionSpec.java @@ -23,7 +23,7 @@ import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; -import io.druid.common.utils.JodaUtils; +import io.druid.java.util.common.JodaUtils; import io.druid.java.util.common.granularity.Granularity; import io.druid.query.filter.DimFilter; import io.druid.timeline.DataSegment; diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/path/GranularUnprocessedPathSpec.java b/indexing-hadoop/src/main/java/io/druid/indexer/path/GranularUnprocessedPathSpec.java index 6d9a6b91b92..1e789980122 100644 --- a/indexing-hadoop/src/main/java/io/druid/indexer/path/GranularUnprocessedPathSpec.java +++ b/indexing-hadoop/src/main/java/io/druid/indexer/path/GranularUnprocessedPathSpec.java @@ -24,10 +24,11 @@ import com.google.common.base.Preconditions; import com.google.common.collect.Lists; import com.google.common.collect.Ordering; import com.google.common.collect.Sets; -import io.druid.java.util.common.StringUtils; -import 
io.druid.java.util.common.granularity.Granularity; import io.druid.indexer.HadoopDruidIndexerConfig; import io.druid.indexer.hadoop.FSSpideringIterator; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.StringUtils; +import io.druid.java.util.common.granularity.Granularity; import io.druid.java.util.common.guava.Comparators; import io.druid.segment.indexing.granularity.UniformGranularitySpec; import org.apache.hadoop.fs.FileStatus; @@ -88,7 +89,7 @@ public class GranularUnprocessedPathSpec extends GranularityPathSpec Set bucketsToRun = Sets.newTreeSet(Comparators.intervals()); for (Map.Entry entry : inputModifiedTimes.entrySet()) { - DateTime timeBucket = new DateTime(entry.getKey()); + DateTime timeBucket = DateTimes.utc(entry.getKey()); long mTime = entry.getValue(); String bucketOutput = StringUtils.format( diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/path/GranularityPathSpec.java b/indexing-hadoop/src/main/java/io/druid/indexer/path/GranularityPathSpec.java index ee99b0512a3..0dc0550f4cb 100644 --- a/indexing-hadoop/src/main/java/io/druid/indexer/path/GranularityPathSpec.java +++ b/indexing-hadoop/src/main/java/io/druid/indexer/path/GranularityPathSpec.java @@ -168,7 +168,7 @@ public class GranularityPathSpec implements PathSpec end = inputInterval.getEndMillis(); makeNew = true; } - return makeNew ? new Interval(start, end) : interval; + return makeNew ? new Interval(start, end, interval.getChronology()) : interval; } } diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/BatchDeltaIngestionTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/BatchDeltaIngestionTest.java index bfee0b37673..829a52ea8f9 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/BatchDeltaIngestionTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/BatchDeltaIngestionTest.java @@ -35,6 +35,7 @@ import io.druid.data.input.impl.StringInputRowParser; import io.druid.data.input.impl.TimestampSpec; import io.druid.indexer.hadoop.WindowedDataSegment; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.Granularities; import io.druid.query.aggregation.AggregatorFactory; @@ -71,8 +72,8 @@ public class BatchDeltaIngestionTest private static final ObjectMapper MAPPER; private static final IndexIO INDEX_IO; - private static final Interval INTERVAL_FULL = new Interval("2014-10-22T00:00:00Z/P1D"); - private static final Interval INTERVAL_PARTIAL = new Interval("2014-10-22T00:00:00Z/PT2H"); + private static final Interval INTERVAL_FULL = Intervals.of("2014-10-22T00:00:00Z/P1D"); + private static final Interval INTERVAL_PARTIAL = Intervals.of("2014-10-22T00:00:00Z/PT2H"); private static final DataSegment SEGMENT; static { diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/BucketTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/BucketTest.java index ec026eeaede..e021abf9370 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/BucketTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/BucketTest.java @@ -21,10 +21,12 @@ package io.druid.indexer; import com.google.common.primitives.Bytes; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.Pair; import org.hamcrest.number.OrderingComparison; import org.joda.time.DateTime; +import org.joda.time.chrono.ISOChronology; import org.junit.After; import org.junit.Assert; import org.junit.Before; @@ -39,7 +41,7 @@ public class 
BucketTest @Before public void setUp() { - time = new DateTime(2014, 11, 24, 10, 30); + time = new DateTime(2014, 11, 24, 10, 30, ISOChronology.getInstanceUTC()); shardNum = 1; partitionNum = 1; bucket = new Bucket(shardNum, time, partitionNum); @@ -80,10 +82,12 @@ public class BucketTest bucket.equals(new Bucket(shardNum, time, partitionNum + 1))); Assert.assertFalse("Objects do not have the same shardNum", bucket.equals(new Bucket(shardNum + 1, time, partitionNum))); - Assert.assertFalse("Objects do not have the same time", bucket.equals(new Bucket(shardNum, new DateTime(), partitionNum))); + Assert.assertFalse( + "Objects do not have the same time", + bucket.equals(new Bucket(shardNum, DateTimes.nowUtc(), partitionNum)) + ); Assert.assertFalse("Object do have NULL time", bucket.equals(new Bucket(shardNum, null, partitionNum))); Assert.assertTrue("Objects must be the same", bucket.equals(new Bucket(shardNum, time, partitionNum))); - } @Test public void testHashCode() diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/DetermineHashedPartitionsJobTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/DetermineHashedPartitionsJobTest.java index db2fe76e5c4..31219d64bb3 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/DetermineHashedPartitionsJobTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/DetermineHashedPartitionsJobTest.java @@ -27,12 +27,12 @@ import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.StringInputRowParser; import io.druid.data.input.impl.TimestampSpec; import io.druid.indexer.partitions.HashedPartitionsSpec; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.granularity.Granularities; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.DoubleSumAggregatorFactory; import io.druid.segment.indexing.DataSchema; import io.druid.segment.indexing.granularity.UniformGranularitySpec; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; @@ -147,7 +147,7 @@ public class DetermineHashedPartitionsJobTest new UniformGranularitySpec( Granularities.DAY, Granularities.NONE, - ImmutableList.of(new Interval(interval)) + ImmutableList.of(Intervals.of(interval)) ), HadoopDruidIndexerConfig.JSON_MAPPER ), diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/DeterminePartitionsJobTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/DeterminePartitionsJobTest.java index a802d3ca037..af7dfba194c 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/DeterminePartitionsJobTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/DeterminePartitionsJobTest.java @@ -27,6 +27,7 @@ import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.StringInputRowParser; import io.druid.data.input.impl.TimestampSpec; import io.druid.indexer.partitions.SingleDimensionPartitionsSpec; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.granularity.Granularities; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.LongSumAggregatorFactory; @@ -34,7 +35,6 @@ import io.druid.segment.indexing.DataSchema; import io.druid.segment.indexing.granularity.UniformGranularitySpec; import io.druid.timeline.partition.SingleDimensionShardSpec; import org.apache.commons.io.FileUtils; -import org.joda.time.Interval; import org.junit.After; import org.junit.Assert; import org.junit.Test; @@ -237,7 +237,7 @@ public class DeterminePartitionsJobTest 
), new AggregatorFactory[]{new LongSumAggregatorFactory("visited_num", "visited_num")}, new UniformGranularitySpec( - Granularities.DAY, Granularities.NONE, ImmutableList.of(new Interval(interval)) + Granularities.DAY, Granularities.NONE, ImmutableList.of(Intervals.of(interval)) ), HadoopDruidIndexerConfig.JSON_MAPPER ), diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/HadoopDruidIndexerConfigTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/HadoopDruidIndexerConfigTest.java index 3c1ffdc1fdd..f91f9b6d5de 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/HadoopDruidIndexerConfigTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/HadoopDruidIndexerConfigTest.java @@ -26,14 +26,14 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import io.druid.data.input.MapBasedInputRow; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.granularity.Granularities; import io.druid.query.aggregation.AggregatorFactory; import io.druid.segment.indexing.DataSchema; import io.druid.segment.indexing.granularity.UniformGranularitySpec; import io.druid.timeline.partition.HashBasedNumberedShardSpec; import io.druid.timeline.partition.NoneShardSpec; -import org.joda.time.DateTime; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; @@ -71,7 +71,7 @@ public class HadoopDruidIndexerConfigTest new UniformGranularitySpec( Granularities.MINUTE, Granularities.MINUTE, - ImmutableList.of(new Interval("2010-01-01/P1D")) + ImmutableList.of(Intervals.of("2010-01-01/P1D")) ), jsonMapper ), @@ -80,7 +80,7 @@ public class HadoopDruidIndexerConfigTest null, null, null, - ImmutableMap.of(new DateTime("2010-01-01T01:00:00").getMillis(), specs), + ImmutableMap.of(DateTimes.of("2010-01-01T01:00:00").getMillis(), specs), null, null, false, @@ -110,9 +110,9 @@ public class HadoopDruidIndexerConfigTest "dim2", "4" ); - final long timestamp = new DateTime("2010-01-01T01:00:01").getMillis(); + final long timestamp = DateTimes.of("2010-01-01T01:00:01").getMillis(); final Bucket expectedBucket = config.getBucket(new MapBasedInputRow(timestamp, dims, values)).get(); - final long nextBucketTimestamp = Granularities.MINUTE.bucketEnd(new DateTime(timestamp)).getMillis(); + final long nextBucketTimestamp = Granularities.MINUTE.bucketEnd(DateTimes.utc(timestamp)).getMillis(); // check that all rows having same set of dims and truncated timestamp hash to same bucket for (int i = 0; timestamp + i < nextBucketTimestamp; i++) { Assert.assertEquals( @@ -134,7 +134,7 @@ public class HadoopDruidIndexerConfigTest new UniformGranularitySpec( Granularities.MINUTE, Granularities.MINUTE, - ImmutableList.of(new Interval("2010-01-01/P1D")) + ImmutableList.of(Intervals.of("2010-01-01/P1D")) ), jsonMapper ), @@ -143,12 +143,12 @@ public class HadoopDruidIndexerConfigTest null, null, null, - ImmutableMap.>of(new DateTime("2010-01-01T01:00:00").getMillis(), + ImmutableMap.>of(DateTimes.of("2010-01-01T01:00:00").getMillis(), Lists.newArrayList(new HadoopyShardSpec( NoneShardSpec.instance(), 1 )), - new DateTime("2010-01-01T02:00:00").getMillis(), + DateTimes.of("2010-01-01T02:00:00").getMillis(), Lists.newArrayList(new HadoopyShardSpec( NoneShardSpec.instance(), 2 @@ -183,10 +183,10 @@ public class HadoopDruidIndexerConfigTest "dim2", "4" ); - final long ts1 = new DateTime("2010-01-01T01:00:01").getMillis(); + final long ts1 = 
DateTimes.of("2010-01-01T01:00:01").getMillis(); Assert.assertEquals(config.getBucket(new MapBasedInputRow(ts1, dims, values)).get().getShardNum(), 1); - final long ts2 = new DateTime("2010-01-01T02:00:01").getMillis(); + final long ts2 = DateTimes.of("2010-01-01T02:00:01").getMillis(); Assert.assertEquals(config.getBucket(new MapBasedInputRow(ts2, dims, values)).get().getShardNum(), 2); } diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/HadoopIngestionSpecTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/HadoopIngestionSpecTest.java index 1750bae6b8a..097402384af 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/HadoopIngestionSpecTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/HadoopIngestionSpecTest.java @@ -30,12 +30,12 @@ import io.druid.indexer.partitions.PartitionsSpec; import io.druid.indexer.partitions.SingleDimensionPartitionsSpec; import io.druid.indexer.updater.MetadataStorageUpdaterJobSpec; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.granularity.PeriodGranularity; import io.druid.metadata.MetadataStorageConnectorConfig; import io.druid.segment.indexing.granularity.UniformGranularitySpec; import org.joda.time.DateTimeZone; -import org.joda.time.Interval; import org.joda.time.Period; import org.junit.Assert; import org.junit.Test; @@ -78,7 +78,7 @@ public class HadoopIngestionSpecTest Assert.assertEquals( "getIntervals", - Lists.newArrayList(new Interval("2012-01-01/P1D")), + Lists.newArrayList(Intervals.of("2012-01-01/P1D")), granularitySpec.getIntervals().get() ); diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest.java index 9154410875c..6899ccb3954 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest.java @@ -32,6 +32,7 @@ import io.druid.indexer.path.PathSpec; import io.druid.indexer.path.StaticPathSpec; import io.druid.indexer.path.UsedSegmentLister; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.granularity.Granularities; import io.druid.query.aggregation.AggregatorFactory; import io.druid.segment.indexing.DataSchema; @@ -51,8 +52,8 @@ import java.util.Map; public class HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest { private final String testDatasource = "test"; - private final Interval testDatasourceInterval = new Interval("1970/3000"); - private final Interval testDatasourceIntervalPartial = new Interval("2050/3000"); + private final Interval testDatasourceInterval = Intervals.of("1970/3000"); + private final Interval testDatasourceIntervalPartial = Intervals.of("2050/3000"); private final ObjectMapper jsonMapper; public HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest() @@ -65,7 +66,7 @@ public class HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest private static final DataSegment SEGMENT = new DataSegment( "test1", - Interval.parse("2000/3000"), + Intervals.of("2000/3000"), "ver", ImmutableMap.of( "type", "local", @@ -233,9 +234,7 @@ public class HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest new UniformGranularitySpec( 
Granularities.DAY, null, - ImmutableList.of( - new Interval("2010-01-01/P1D") - ) + ImmutableList.of(Intervals.of("2010-01-01/P1D")) ), jsonMapper ), diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/IndexGeneratorCombinerTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/IndexGeneratorCombinerTest.java index 0815581fbf3..5592fc3694e 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/IndexGeneratorCombinerTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/IndexGeneratorCombinerTest.java @@ -22,13 +22,15 @@ package io.druid.indexer; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; -import io.druid.java.util.common.StringUtils; import io.druid.data.input.InputRow; import io.druid.data.input.MapBasedInputRow; import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.StringInputRowParser; import io.druid.data.input.impl.TimeAndDimsParseSpec; import io.druid.data.input.impl.TimestampSpec; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.Granularities; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.LongSumAggregatorFactory; @@ -40,8 +42,6 @@ import org.apache.hadoop.io.BytesWritable; import org.apache.hadoop.mapreduce.Reducer; import org.easymock.Capture; import org.easymock.EasyMock; -import org.joda.time.DateTime; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Before; import org.junit.Test; @@ -80,7 +80,7 @@ public class IndexGeneratorCombinerTest new HyperUniquesAggregatorFactory("unique_hosts", "host") }, new UniformGranularitySpec( - Granularities.DAY, Granularities.NONE, ImmutableList.of(Interval.parse("2010/2011")) + Granularities.DAY, Granularities.NONE, ImmutableList.of(Intervals.of("2010/2011")) ), HadoopDruidIndexerConfig.JSON_MAPPER ), @@ -136,7 +136,7 @@ public class IndexGeneratorCombinerTest { long timestamp = System.currentTimeMillis(); - Bucket bucket = new Bucket(0, new DateTime(timestamp), 0); + Bucket bucket = new Bucket(0, DateTimes.utc(timestamp), 0); SortableBytes keySortableBytes = new SortableBytes( bucket.toGroupKey(), new byte[0] @@ -195,7 +195,7 @@ public class IndexGeneratorCombinerTest { long timestamp = System.currentTimeMillis(); - Bucket bucket = new Bucket(0, new DateTime(timestamp), 0); + Bucket bucket = new Bucket(0, DateTimes.utc(timestamp), 0); SortableBytes keySortableBytes = new SortableBytes( bucket.toGroupKey(), new byte[0] diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/IndexGeneratorJobTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/IndexGeneratorJobTest.java index 412b5718c0b..f70d7f1e1e3 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/IndexGeneratorJobTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/IndexGeneratorJobTest.java @@ -31,6 +31,7 @@ import io.druid.data.input.impl.InputRowParser; import io.druid.data.input.impl.JSONParseSpec; import io.druid.data.input.impl.StringInputRowParser; import io.druid.data.input.impl.TimestampSpec; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.RE; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.Granularities; @@ -423,7 +424,7 @@ public class IndexGeneratorJobTest this.useCombiner = useCombiner; this.partitionType = partitionType; 
this.shardInfoForEachSegment = shardInfoForEachSegment; - this.interval = new Interval(interval); + this.interval = Intervals.of(interval); this.data = data; this.inputFormatName = inputFormatName; this.inputRowParser = inputRowParser; diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/JobHelperTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/JobHelperTest.java index 1fcef910095..4ab2e6bba37 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/JobHelperTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/JobHelperTest.java @@ -25,6 +25,7 @@ import io.druid.data.input.impl.CSVParseSpec; import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.StringInputRowParser; import io.druid.data.input.impl.TimestampSpec; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.granularity.Granularities; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.LongSumAggregatorFactory; @@ -60,7 +61,7 @@ public class JobHelperTest private HadoopDruidIndexerConfig config; private File tmpDir; private File dataFile; - private Interval interval = new Interval("2014-10-22T00:00:00Z/P1D"); + private Interval interval = Intervals.of("2014-10-22T00:00:00Z/P1D"); @Before public void setup() throws Exception @@ -155,7 +156,7 @@ public class JobHelperTest { DataSegment segment = new DataSegment( "test1", - Interval.parse("2000/3000"), + Intervals.of("2000/3000"), "ver", ImmutableMap.of( "type", "google", diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/hadoop/DatasourceIngestionSpecTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/hadoop/DatasourceIngestionSpecTest.java index e935af6bab5..adbeeea4486 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/hadoop/DatasourceIngestionSpecTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/hadoop/DatasourceIngestionSpecTest.java @@ -22,6 +22,7 @@ package io.druid.indexer.hadoop; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; +import io.druid.java.util.common.Intervals; import io.druid.query.filter.SelectorDimFilter; import io.druid.segment.TestHelper; import io.druid.timeline.DataSegment; @@ -40,7 +41,7 @@ public class DatasourceIngestionSpecTest @Test public void testSingleIntervalSerde() throws Exception { - Interval interval = Interval.parse("2014/2015"); + Interval interval = Intervals.of("2014/2015"); DatasourceIngestionSpec expected = new DatasourceIngestionSpec( "test", @@ -74,7 +75,7 @@ public class DatasourceIngestionSpecTest DatasourceIngestionSpec.class ); - List intervals = ImmutableList.of(Interval.parse("2014/2015"), Interval.parse("2016/2017")); + List intervals = ImmutableList.of(Intervals.of("2014/2015"), Intervals.of("2016/2017")); DatasourceIngestionSpec expected = new DatasourceIngestionSpec( "test", @@ -119,7 +120,7 @@ public class DatasourceIngestionSpecTest ImmutableList.of( new DataSegment( "test", - Interval.parse("2014/2017"), + Intervals.of("2014/2017"), "v0", null, null, @@ -152,7 +153,7 @@ public class DatasourceIngestionSpecTest DatasourceIngestionSpec actual = MAPPER.readValue(jsonStr, DatasourceIngestionSpec.class); Assert.assertEquals( - new DatasourceIngestionSpec("test", Interval.parse("2014/2015"), null, null, null, null, null, false), + new DatasourceIngestionSpec("test", Intervals.of("2014/2015"), null, null, null, null, null, false), actual ); } diff --git 
a/indexing-hadoop/src/test/java/io/druid/indexer/hadoop/DatasourceInputFormatTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/hadoop/DatasourceInputFormatTest.java index c36f1343d4a..6177cf76927 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/hadoop/DatasourceInputFormatTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/hadoop/DatasourceInputFormatTest.java @@ -29,6 +29,7 @@ import com.google.common.collect.Sets; import com.google.common.io.Files; import io.druid.indexer.JobHelper; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Intervals; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; import org.apache.hadoop.fs.BlockLocation; @@ -42,7 +43,6 @@ import org.apache.hadoop.mapred.TextInputFormat; import org.apache.hadoop.mapreduce.InputSplit; import org.apache.hadoop.mapreduce.JobContext; import org.easymock.EasyMock; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Before; import org.junit.Rule; @@ -74,7 +74,7 @@ public class DatasourceInputFormatTest WindowedDataSegment.of( new DataSegment( "test1", - Interval.parse("2000/3000"), + Intervals.of("2000/3000"), "ver", ImmutableMap.of( "type", "local", @@ -90,7 +90,7 @@ public class DatasourceInputFormatTest WindowedDataSegment.of( new DataSegment( "test2", - Interval.parse("2050/3000"), + Intervals.of("2050/3000"), "ver", ImmutableMap.of( "type", "hdfs", @@ -106,7 +106,7 @@ public class DatasourceInputFormatTest WindowedDataSegment.of( new DataSegment( "test3", - Interval.parse("2030/3000"), + Intervals.of("2030/3000"), "ver", ImmutableMap.of( "type", "hdfs", @@ -287,7 +287,7 @@ public class DatasourceInputFormatTest WindowedDataSegment.of( new DataSegment( "test1", - Interval.parse("2000/3000"), + Intervals.of("2000/3000"), "ver", ImmutableMap.of( "type", "local", diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/hadoop/DatasourceInputSplitTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/hadoop/DatasourceInputSplitTest.java index dcbb3d6483a..0140ede1545 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/hadoop/DatasourceInputSplitTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/hadoop/DatasourceInputSplitTest.java @@ -24,6 +24,7 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.google.common.io.ByteArrayDataOutput; import com.google.common.io.ByteStreams; +import io.druid.java.util.common.Intervals; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; import org.joda.time.Interval; @@ -39,13 +40,13 @@ public class DatasourceInputSplitTest @Test public void testSerde() throws Exception { - Interval interval = Interval.parse("2000/3000"); + Interval interval = Intervals.of("2000/3000"); DatasourceInputSplit expected = new DatasourceInputSplit( Lists.newArrayList( new WindowedDataSegment( new DataSegment( "test", - Interval.parse("2000/3000"), + Intervals.of("2000/3000"), "ver", ImmutableMap.of( "type", "local", diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/hadoop/WindowedDataSegmentTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/hadoop/WindowedDataSegmentTest.java index 48d23bc8539..f350bb7ff61 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/hadoop/WindowedDataSegmentTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/hadoop/WindowedDataSegmentTest.java @@ -23,6 +23,7 @@ import com.fasterxml.jackson.databind.ObjectMapper; import 
com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Intervals; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; import org.joda.time.Interval; @@ -36,7 +37,7 @@ public class WindowedDataSegmentTest private static final ObjectMapper MAPPER = new DefaultObjectMapper(); private static final DataSegment SEGMENT = new DataSegment( "test1", - Interval.parse("2000/3000"), + Intervals.of("2000/3000"), "ver", ImmutableMap.of( "type", "local", @@ -65,7 +66,7 @@ public class WindowedDataSegmentTest @Test public void testSerdePartialWindow() throws IOException { - final Interval partialInterval = new Interval("2500/3000"); + final Interval partialInterval = Intervals.of("2500/3000"); final WindowedDataSegment windowedDataSegment = new WindowedDataSegment(SEGMENT, partialInterval); final WindowedDataSegment roundTrip = MAPPER.readValue( MAPPER.writeValueAsBytes(windowedDataSegment), diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/path/DatasourcePathSpecTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/path/DatasourcePathSpecTest.java index d0a74653b7d..fd993255738 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/path/DatasourcePathSpecTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/path/DatasourcePathSpecTest.java @@ -44,6 +44,7 @@ import io.druid.indexer.hadoop.WindowedDataSegment; import io.druid.initialization.Initialization; import io.druid.jackson.DefaultObjectMapper; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.granularity.Granularities; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.LongSumAggregatorFactory; @@ -56,7 +57,6 @@ import io.druid.timeline.partition.NoneShardSpec; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.mapreduce.Job; import org.easymock.EasyMock; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; @@ -74,7 +74,7 @@ public class DatasourcePathSpecTest { this.ingestionSpec = new DatasourceIngestionSpec( "test", - Interval.parse("2000/3000"), + Intervals.of("2000/3000"), null, null, null, @@ -87,7 +87,7 @@ public class DatasourcePathSpecTest WindowedDataSegment.of( new DataSegment( ingestionSpec.getDataSource(), - Interval.parse("2000/3000"), + Intervals.of("2000/3000"), "ver", ImmutableMap.of( "type", "local", @@ -103,7 +103,7 @@ public class DatasourcePathSpecTest WindowedDataSegment.of( new DataSegment( ingestionSpec.getDataSource(), - Interval.parse("2050/3000"), + Intervals.of("2050/3000"), "ver", ImmutableMap.of( "type", "hdfs", @@ -278,7 +278,7 @@ public class DatasourcePathSpecTest new LongSumAggregatorFactory("visited_sum", "visited") }, new UniformGranularitySpec( - Granularities.DAY, Granularities.NONE, ImmutableList.of(Interval.parse("2000/3000")) + Granularities.DAY, Granularities.NONE, ImmutableList.of(Intervals.of("2000/3000")) ), HadoopDruidIndexerConfig.JSON_MAPPER ), diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/path/GranularityPathSpecTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/path/GranularityPathSpecTest.java index 6317f1be111..9e3a0447530 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/path/GranularityPathSpecTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/path/GranularityPathSpecTest.java @@ -29,6 +29,7 @@ import io.druid.indexer.HadoopIOConfig; 
import io.druid.indexer.HadoopIngestionSpec; import io.druid.indexer.HadoopTuningConfig; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.granularity.Granularity; @@ -40,7 +41,6 @@ import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.mapreduce.lib.input.TextInputFormat; import org.apache.hadoop.security.UserGroupInformation; import org.joda.time.DateTimeZone; -import org.joda.time.Interval; import org.joda.time.Period; import org.junit.After; import org.junit.Assert; @@ -151,7 +151,7 @@ public class GranularityPathSpecTest new UniformGranularitySpec( Granularities.DAY, Granularities.MINUTE, - ImmutableList.of(new Interval("2015-11-06T00:00Z/2015-11-07T00:00Z")) + ImmutableList.of(Intervals.of("2015-11-06T00:00Z/2015-11-07T00:00Z")) ), jsonMapper ), @@ -202,7 +202,7 @@ public class GranularityPathSpecTest new UniformGranularitySpec( Granularities.DAY, Granularities.ALL, - ImmutableList.of(new Interval("2015-01-01T11Z/2015-01-02T05Z")) + ImmutableList.of(Intervals.of("2015-01-01T11Z/2015-01-02T05Z")) ), jsonMapper ), diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/updater/HadoopConverterJobTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/updater/HadoopConverterJobTest.java index b25e91d4f43..61350ac9a88 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/updater/HadoopConverterJobTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/updater/HadoopConverterJobTest.java @@ -42,6 +42,7 @@ import io.druid.indexer.JobHelper; import io.druid.indexer.Jobby; import io.druid.indexer.SQLMetadataStorageUpdaterJobHandler; import io.druid.java.util.common.FileUtils; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.granularity.Granularities; import io.druid.metadata.MetadataSegmentManagerConfig; import io.druid.metadata.MetadataStorageConnectorConfig; @@ -103,7 +104,7 @@ public class HadoopConverterJobTest private Supplier metadataStorageTablesConfigSupplier; private DerbyConnector connector; - private final Interval interval = Interval.parse("2011-01-01T00:00:00.000Z/2011-05-01T00:00:00.000Z"); + private final Interval interval = Intervals.of("2011-01-01T00:00:00.000Z/2011-05-01T00:00:00.000Z"); @After public void tearDown() diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/updater/HadoopDruidConverterConfigTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/updater/HadoopDruidConverterConfigTest.java index 2c50969f59e..e2bbcb76a8d 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/updater/HadoopDruidConverterConfigTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/updater/HadoopDruidConverterConfigTest.java @@ -23,9 +23,9 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Intervals; import io.druid.segment.IndexSpec; import io.druid.timeline.DataSegment; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Rule; import org.junit.Test; @@ -44,7 +44,7 @@ public class HadoopDruidConverterConfigTest { final HadoopDruidConverterConfig config = new HadoopDruidConverterConfig( "datasource", - Interval.parse("2000/2010"), + Intervals.of("2000/2010"), new IndexSpec(), ImmutableList.of(), true, diff --git 
a/indexing-service/src/main/java/io/druid/indexing/appenderator/ActionBasedUsedSegmentChecker.java b/indexing-service/src/main/java/io/druid/indexing/appenderator/ActionBasedUsedSegmentChecker.java index 6ee38cbda84..0cec2c8be06 100644 --- a/indexing-service/src/main/java/io/druid/indexing/appenderator/ActionBasedUsedSegmentChecker.java +++ b/indexing-service/src/main/java/io/druid/indexing/appenderator/ActionBasedUsedSegmentChecker.java @@ -23,7 +23,7 @@ import com.google.common.base.Function; import com.google.common.collect.Iterables; import com.google.common.collect.Maps; import com.google.common.collect.Sets; -import io.druid.common.utils.JodaUtils; +import io.druid.java.util.common.JodaUtils; import io.druid.indexing.common.actions.SegmentListUsedAction; import io.druid.indexing.common.actions.TaskActionClient; import io.druid.segment.realtime.appenderator.SegmentIdentifier; diff --git a/indexing-service/src/main/java/io/druid/indexing/common/actions/SegmentListUsedAction.java b/indexing-service/src/main/java/io/druid/indexing/common/actions/SegmentListUsedAction.java index 6e4e52c6b5d..727afbe8315 100644 --- a/indexing-service/src/main/java/io/druid/indexing/common/actions/SegmentListUsedAction.java +++ b/indexing-service/src/main/java/io/druid/indexing/common/actions/SegmentListUsedAction.java @@ -25,7 +25,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.core.type.TypeReference; import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; -import io.druid.common.utils.JodaUtils; +import io.druid.java.util.common.JodaUtils; import io.druid.indexing.common.task.Task; import io.druid.timeline.DataSegment; import org.joda.time.Interval; diff --git a/indexing-service/src/main/java/io/druid/indexing/common/task/AbstractTask.java b/indexing-service/src/main/java/io/druid/indexing/common/task/AbstractTask.java index 67bf7227d3e..442204edd6b 100644 --- a/indexing-service/src/main/java/io/druid/indexing/common/task/AbstractTask.java +++ b/indexing-service/src/main/java/io/druid/indexing/common/task/AbstractTask.java @@ -28,9 +28,9 @@ import io.druid.indexing.common.TaskLock; import io.druid.indexing.common.TaskStatus; import io.druid.indexing.common.TaskToolbox; import io.druid.indexing.common.actions.LockListAction; +import io.druid.java.util.common.DateTimes; import io.druid.query.Query; import io.druid.query.QueryRunner; -import org.joda.time.DateTime; import org.joda.time.Interval; import java.io.IOException; @@ -86,7 +86,7 @@ public abstract class AbstractTask implements Task dataSource, interval.getStart(), interval.getEnd(), - new DateTime().toString() + DateTimes.nowUtc().toString() ); } diff --git a/indexing-service/src/main/java/io/druid/indexing/common/task/AppendTask.java b/indexing-service/src/main/java/io/druid/indexing/common/task/AppendTask.java index 8cbb8c434d3..a16a6547d11 100644 --- a/indexing-service/src/main/java/io/druid/indexing/common/task/AppendTask.java +++ b/indexing-service/src/main/java/io/druid/indexing/common/task/AppendTask.java @@ -27,6 +27,7 @@ import com.google.common.base.Predicate; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import io.druid.indexing.common.TaskToolbox; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.guava.Comparators; import io.druid.query.aggregation.AggregatorFactory; import io.druid.segment.IndexMerger; @@ -84,7 +85,7 @@ public class AppendTask extends MergeTaskBase final Iterable segmentsToMerge 
= Iterables.concat( Iterables.transform( - timeline.lookup(new Interval("1000-01-01/3000-01-01")), + timeline.lookup(Intervals.of("1000-01-01/3000-01-01")), new Function, Iterable>() { @Override diff --git a/indexing-service/src/main/java/io/druid/indexing/common/task/ConvertSegmentTask.java b/indexing-service/src/main/java/io/druid/indexing/common/task/ConvertSegmentTask.java index b223bdabcf1..5ab8251d7f5 100644 --- a/indexing-service/src/main/java/io/druid/indexing/common/task/ConvertSegmentTask.java +++ b/indexing-service/src/main/java/io/druid/indexing/common/task/ConvertSegmentTask.java @@ -27,12 +27,12 @@ import com.google.common.base.Preconditions; import com.google.common.base.Predicate; import com.google.common.collect.Iterables; import com.google.common.collect.Sets; - import io.druid.indexing.common.TaskStatus; import io.druid.indexing.common.TaskToolbox; import io.druid.indexing.common.actions.SegmentInsertAction; import io.druid.indexing.common.actions.SegmentListUsedAction; import io.druid.indexing.common.actions.TaskActionClient; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.guava.FunctionalIterable; import io.druid.java.util.common.logger.Logger; @@ -40,7 +40,6 @@ import io.druid.segment.IndexIO; import io.druid.segment.IndexSpec; import io.druid.segment.loading.SegmentLoadingException; import io.druid.timeline.DataSegment; -import org.joda.time.DateTime; import org.joda.time.Interval; import java.io.File; @@ -120,7 +119,7 @@ public class ConvertSegmentTask extends AbstractFixedIntervalTask { Preconditions.checkNotNull(dataSource, "dataSource"); Preconditions.checkNotNull(interval, "interval"); - return joinId(TYPE, dataSource, interval.getStart(), interval.getEnd(), new DateTime()); + return joinId(TYPE, dataSource, interval.getStart(), interval.getEnd(), DateTimes.nowUtc()); } @JsonCreator diff --git a/indexing-service/src/main/java/io/druid/indexing/common/task/HadoopIndexTask.java b/indexing-service/src/main/java/io/druid/indexing/common/task/HadoopIndexTask.java index 275bf7b31da..ae79eb64ae0 100644 --- a/indexing-service/src/main/java/io/druid/indexing/common/task/HadoopIndexTask.java +++ b/indexing-service/src/main/java/io/druid/indexing/common/task/HadoopIndexTask.java @@ -29,7 +29,6 @@ import com.google.common.base.Optional; import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; -import io.druid.common.utils.JodaUtils; import io.druid.indexer.HadoopDruidDetermineConfigurationJob; import io.druid.indexer.HadoopDruidIndexerConfig; import io.druid.indexer.HadoopDruidIndexerJob; @@ -43,10 +42,11 @@ import io.druid.indexing.common.actions.LockAcquireAction; import io.druid.indexing.common.actions.LockTryAcquireAction; import io.druid.indexing.common.actions.TaskActionClient; import io.druid.indexing.hadoop.OverlordActionBasedUsedSegmentLister; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.JodaUtils; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.logger.Logger; import io.druid.timeline.DataSegment; -import org.joda.time.DateTime; import org.joda.time.Interval; import java.util.List; @@ -93,7 +93,7 @@ public class HadoopIndexTask extends HadoopTask ) { super( - id != null ? id : StringUtils.format("index_hadoop_%s_%s", getTheDataSource(spec), new DateTime()), + id != null ? 
id : StringUtils.format("index_hadoop_%s_%s", getTheDataSource(spec), DateTimes.nowUtc()), getTheDataSource(spec), hadoopDependencyCoordinates == null ? (hadoopCoordinates == null ? null : ImmutableList.of(hadoopCoordinates)) diff --git a/indexing-service/src/main/java/io/druid/indexing/common/task/IndexTask.java b/indexing-service/src/main/java/io/druid/indexing/common/task/IndexTask.java index 1477d91b530..bc68c7b4ed9 100644 --- a/indexing-service/src/main/java/io/druid/indexing/common/task/IndexTask.java +++ b/indexing-service/src/main/java/io/druid/indexing/common/task/IndexTask.java @@ -36,7 +36,8 @@ import com.google.common.collect.Iterables; import com.google.common.hash.HashFunction; import com.google.common.hash.Hashing; import com.google.common.util.concurrent.ListenableFuture; -import io.druid.common.utils.JodaUtils; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.JodaUtils; import io.druid.data.input.Committer; import io.druid.data.input.Firehose; import io.druid.data.input.FirehoseFactory; @@ -116,7 +117,11 @@ public class IndexTask extends AbstractTask private static String makeId(String id, IndexIngestionSpec ingestionSchema) { - return id != null ? id : StringUtils.format("index_%s_%s", makeDataSource(ingestionSchema), new DateTime()); + if (id != null) { + return id; + } else { + return StringUtils.format("index_%s_%s", makeDataSource(ingestionSchema), DateTimes.nowUtc()); + } } private static String makeGroupId(IndexIngestionSpec ingestionSchema) diff --git a/indexing-service/src/main/java/io/druid/indexing/common/task/MergeTaskBase.java b/indexing-service/src/main/java/io/druid/indexing/common/task/MergeTaskBase.java index f6d9b3e860c..13c2660aec8 100644 --- a/indexing-service/src/main/java/io/druid/indexing/common/task/MergeTaskBase.java +++ b/indexing-service/src/main/java/io/druid/indexing/common/task/MergeTaskBase.java @@ -42,6 +42,7 @@ import io.druid.indexing.common.TaskStatus; import io.druid.indexing.common.TaskToolbox; import io.druid.indexing.common.actions.SegmentListUsedAction; import io.druid.indexing.common.actions.TaskActionClient; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.ISE; import io.druid.java.util.common.StringUtils; import io.druid.segment.IndexIO; @@ -75,7 +76,7 @@ public abstract class MergeTaskBase extends AbstractFixedIntervalTask super( // _not_ the version, just something uniqueish id != null ? 
id : StringUtils.format( - "merge_%s_%s", computeProcessingID(dataSource, segments), new DateTime().toString() + "merge_%s_%s", computeProcessingID(dataSource, segments), DateTimes.nowUtc().toString() ), dataSource, computeMergedInterval(segments), diff --git a/indexing-service/src/main/java/io/druid/indexing/common/task/NoopTask.java b/indexing-service/src/main/java/io/druid/indexing/common/task/NoopTask.java index 487b8e401ee..43ca99bce8a 100644 --- a/indexing-service/src/main/java/io/druid/indexing/common/task/NoopTask.java +++ b/indexing-service/src/main/java/io/druid/indexing/common/task/NoopTask.java @@ -27,10 +27,10 @@ import io.druid.data.input.FirehoseFactory; import io.druid.indexing.common.TaskStatus; import io.druid.indexing.common.TaskToolbox; import io.druid.indexing.common.actions.TaskActionClient; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.ISE; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.logger.Logger; -import org.joda.time.DateTime; import java.util.Map; import java.util.UUID; @@ -74,7 +74,7 @@ public class NoopTask extends AbstractTask ) { super( - id == null ? StringUtils.format("noop_%s_%s", new DateTime(), UUID.randomUUID().toString()) : id, + id == null ? StringUtils.format("noop_%s_%s", DateTimes.nowUtc(), UUID.randomUUID().toString()) : id, "none", context ); diff --git a/indexing-service/src/main/java/io/druid/indexing/common/task/RealtimeIndexTask.java b/indexing-service/src/main/java/io/druid/indexing/common/task/RealtimeIndexTask.java index 7e641da3f84..fe2a92f2b9d 100644 --- a/indexing-service/src/main/java/io/druid/indexing/common/task/RealtimeIndexTask.java +++ b/indexing-service/src/main/java/io/druid/indexing/common/task/RealtimeIndexTask.java @@ -40,6 +40,7 @@ import io.druid.indexing.common.TaskToolbox; import io.druid.indexing.common.actions.LockAcquireAction; import io.druid.indexing.common.actions.LockReleaseAction; import io.druid.indexing.common.actions.TaskActionClient; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.guava.CloseQuietly; import io.druid.query.DruidMetrics; @@ -91,7 +92,7 @@ public class RealtimeIndexTask extends AbstractTask return makeTaskId( fireDepartment.getDataSchema().getDataSource(), fireDepartment.getTuningConfig().getShardSpec().getPartitionNum(), - new DateTime(), + DateTimes.nowUtc(), random.nextInt() ); } diff --git a/indexing-service/src/main/java/io/druid/indexing/common/task/SameIntervalMergeTask.java b/indexing-service/src/main/java/io/druid/indexing/common/task/SameIntervalMergeTask.java index 3a112e83ebb..b47249eb113 100644 --- a/indexing-service/src/main/java/io/druid/indexing/common/task/SameIntervalMergeTask.java +++ b/indexing-service/src/main/java/io/druid/indexing/common/task/SameIntervalMergeTask.java @@ -25,10 +25,10 @@ import com.google.common.base.Preconditions; import io.druid.indexing.common.TaskStatus; import io.druid.indexing.common.TaskToolbox; import io.druid.indexing.common.actions.SegmentListUsedAction; +import io.druid.java.util.common.DateTimes; import io.druid.query.aggregation.AggregatorFactory; import io.druid.segment.IndexSpec; import io.druid.timeline.DataSegment; -import org.joda.time.DateTime; import org.joda.time.Interval; import java.util.List; @@ -102,7 +102,7 @@ public class SameIntervalMergeTask extends AbstractFixedIntervalTask dataSource, interval.getStart(), interval.getEnd(), - new DateTime().toString() + DateTimes.nowUtc().toString() ); 
} diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/ForkingTaskRunner.java b/indexing-service/src/main/java/io/druid/indexing/overlord/ForkingTaskRunner.java index 9e17bfe9fd9..e4034354bcc 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/ForkingTaskRunner.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/ForkingTaskRunner.java @@ -53,6 +53,7 @@ import io.druid.indexing.common.tasklogs.LogUtils; import io.druid.indexing.overlord.autoscaling.ScalingStats; import io.druid.indexing.overlord.config.ForkingTaskRunnerConfig; import io.druid.indexing.worker.config.WorkerConfig; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.IOE; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.io.Closer; @@ -560,7 +561,7 @@ public class ForkingTaskRunner implements TaskRunner, TaskLogStreamer } } - final DateTime start = new DateTime(); + final DateTime start = DateTimes.nowUtc(); final long timeout = new Interval(start, taskConfig.getGracefulShutdownTimeout()).toDurationMillis(); // Things should be terminating now. Wait for it to happen so logs can be uploaded and all that good stuff. diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/HeapMemoryTaskStorage.java b/indexing-service/src/main/java/io/druid/indexing/overlord/HeapMemoryTaskStorage.java index 0906fef89a1..f41b1d7590c 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/HeapMemoryTaskStorage.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/HeapMemoryTaskStorage.java @@ -35,6 +35,7 @@ import io.druid.indexing.common.TaskStatus; import io.druid.indexing.common.actions.TaskAction; import io.druid.indexing.common.config.TaskStorageConfig; import io.druid.indexing.common.task.Task; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.logger.Logger; import io.druid.metadata.EntryExistsException; import org.joda.time.DateTime; @@ -84,7 +85,7 @@ public class HeapMemoryTaskStorage implements TaskStorage } log.info("Inserting task %s with status: %s", task.getId(), status); - tasks.put(task.getId(), new TaskStuff(task, status, new DateTime())); + tasks.put(task.getId(), new TaskStuff(task, status, DateTimes.nowUtc())); } finally { giant.unlock(); diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/MetadataTaskStorage.java b/indexing-service/src/main/java/io/druid/indexing/overlord/MetadataTaskStorage.java index 72dcd3ec34d..d8c1a3f7dfb 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/MetadataTaskStorage.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/MetadataTaskStorage.java @@ -34,6 +34,7 @@ import io.druid.indexing.common.TaskStatus; import io.druid.indexing.common.actions.TaskAction; import io.druid.indexing.common.config.TaskStorageConfig; import io.druid.indexing.common.task.Task; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.ISE; import io.druid.java.util.common.Pair; import io.druid.java.util.common.lifecycle.LifecycleStart; @@ -134,7 +135,7 @@ public class MetadataTaskStorage implements TaskStorage try { handler.insert( task.getId(), - new DateTime(), + DateTimes.nowUtc(), task.getDataSource(), task, status.isRunnable(), @@ -213,7 +214,7 @@ public class MetadataTaskStorage implements TaskStorage @Override public List getRecentlyFinishedTaskStatuses() { - final DateTime start = new DateTime().minus(config.getRecentlyFinishedThreshold()); + final DateTime start = 
DateTimes.nowUtc().minus(config.getRecentlyFinishedThreshold()); return ImmutableList.copyOf( Iterables.filter( diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/RemoteTaskRunner.java b/indexing-service/src/main/java/io/druid/indexing/overlord/RemoteTaskRunner.java index 5cbc4a5b346..e0f2908e6a3 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/RemoteTaskRunner.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/RemoteTaskRunner.java @@ -64,6 +64,7 @@ import io.druid.indexing.overlord.setup.WorkerBehaviorConfig; import io.druid.indexing.overlord.setup.WorkerSelectStrategy; import io.druid.indexing.worker.TaskAnnouncement; import io.druid.indexing.worker.Worker; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.ISE; import io.druid.java.util.common.Pair; import io.druid.java.util.common.RE; @@ -84,7 +85,6 @@ import org.apache.zookeeper.CreateMode; import org.apache.zookeeper.KeeperException; import org.jboss.netty.handler.codec.http.HttpMethod; import org.jboss.netty.handler.codec.http.HttpResponseStatus; -import org.joda.time.DateTime; import org.joda.time.Duration; import org.joda.time.Period; @@ -1181,7 +1181,7 @@ public class RemoteTaskRunner implements WorkerTaskRunner, TaskLogStreamer taskStatus.getStatusCode() ); // Worker is done with this task - zkWorker.setLastCompletedTaskTime(new DateTime()); + zkWorker.setLastCompletedTaskTime(DateTimes.nowUtc()); } else { log.info("Workerless task[%s] completed with status[%s]", taskStatus.getId(), taskStatus.getStatusCode()); } @@ -1206,7 +1206,7 @@ public class RemoteTaskRunner implements WorkerTaskRunner, TaskLogStreamer synchronized (blackListedWorkers) { if (zkWorker.getContinuouslyFailedTasksCount() > config.getMaxRetriesBeforeBlacklist() && blackListedWorkers.size() <= zkWorkers.size() * (config.getMaxPercentageBlacklistWorkers() / 100.0) - 1) { - zkWorker.setBlacklistedUntil(DateTime.now().plus(config.getWorkerBlackListBackoffTime())); + zkWorker.setBlacklistedUntil(DateTimes.nowUtc().plus(config.getWorkerBlackListBackoffTime())); if (blackListedWorkers.add(zkWorker)) { log.info( "Blacklisting [%s] until [%s] after [%,d] failed tasks in a row.", diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/RemoteTaskRunnerWorkQueue.java b/indexing-service/src/main/java/io/druid/indexing/overlord/RemoteTaskRunnerWorkQueue.java index 334b72b46aa..29ab3a7aa65 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/RemoteTaskRunnerWorkQueue.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/RemoteTaskRunnerWorkQueue.java @@ -19,7 +19,7 @@ package io.druid.indexing.overlord; -import org.joda.time.DateTime; +import io.druid.java.util.common.DateTimes; import java.util.concurrent.ConcurrentSkipListMap; @@ -30,6 +30,6 @@ public class RemoteTaskRunnerWorkQueue extends ConcurrentSkipListMap dsLockbox = dsRunning.navigableKeySet(); final Iterable searchIntervals = Iterables.concat( // Single interval that starts at or before ours - Collections.singletonList(dsLockbox.floor(new Interval(interval.getStart(), new DateTime(JodaUtils.MAX_INSTANT)))), + Collections.singletonList(dsLockbox.floor(new Interval(interval.getStart(), DateTimes.MAX))), // All intervals that start somewhere between our start instant (exclusive) and end instant (exclusive) dsLockbox.subSet( - new Interval(interval.getStart(), new DateTime(JodaUtils.MAX_INSTANT)), + new Interval(interval.getStart(), DateTimes.MAX), false, new 
Interval(interval.getEnd(), interval.getEnd()), false diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/TaskRunnerWorkItem.java b/indexing-service/src/main/java/io/druid/indexing/overlord/TaskRunnerWorkItem.java index b6cd2604457..f3f3e3e066f 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/TaskRunnerWorkItem.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/TaskRunnerWorkItem.java @@ -24,6 +24,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.util.concurrent.ListenableFuture; import io.druid.indexing.common.TaskLocation; import io.druid.indexing.common.TaskStatus; +import io.druid.java.util.common.DateTimes; import org.joda.time.DateTime; /** @@ -36,12 +37,14 @@ public abstract class TaskRunnerWorkItem private final DateTime createdTime; private final DateTime queueInsertionTime; - public TaskRunnerWorkItem( - String taskId, - ListenableFuture result - ) + public TaskRunnerWorkItem(String taskId, ListenableFuture result) { - this(taskId, result, new DateTime(), new DateTime()); + this(taskId, result, DateTimes.nowUtc()); + } + + private TaskRunnerWorkItem(String taskId, ListenableFuture result, DateTime createdTime) + { + this(taskId, result, createdTime, createdTime); } public TaskRunnerWorkItem( diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/ThreadPoolTaskRunner.java b/indexing-service/src/main/java/io/druid/indexing/overlord/ThreadPoolTaskRunner.java index 694d382471a..907704e2d77 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/ThreadPoolTaskRunner.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/ThreadPoolTaskRunner.java @@ -42,6 +42,7 @@ import io.druid.indexing.common.TaskToolboxFactory; import io.druid.indexing.common.config.TaskConfig; import io.druid.indexing.common.task.Task; import io.druid.indexing.overlord.autoscaling.ScalingStats; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.ISE; import io.druid.java.util.common.Pair; import io.druid.java.util.common.lifecycle.LifecycleStop; @@ -51,7 +52,6 @@ import io.druid.query.QueryRunner; import io.druid.query.QuerySegmentWalker; import io.druid.query.SegmentDescriptor; import io.druid.server.DruidNode; -import org.joda.time.DateTime; import org.joda.time.Interval; import java.util.Collection; @@ -179,7 +179,7 @@ public class ThreadPoolTaskRunner implements TaskRunner, QuerySegmentWalker try { task.stopGracefully(); final TaskStatus taskStatus = item.getResult().get( - new Interval(new DateTime(start), taskConfig.getGracefulShutdownTimeout()).toDurationMillis(), + new Interval(DateTimes.utc(start), taskConfig.getGracefulShutdownTimeout()).toDurationMillis(), TimeUnit.MILLISECONDS ); diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/ZkWorker.java b/indexing-service/src/main/java/io/druid/indexing/overlord/ZkWorker.java index 1ce283254fe..73e7d0ddbfa 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/ZkWorker.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/ZkWorker.java @@ -29,6 +29,7 @@ import com.google.common.collect.Maps; import com.google.common.collect.Sets; import io.druid.indexing.worker.TaskAnnouncement; import io.druid.indexing.worker.Worker; +import io.druid.java.util.common.DateTimes; import org.apache.curator.framework.recipes.cache.ChildData; import org.apache.curator.framework.recipes.cache.PathChildrenCache; import 
org.apache.curator.framework.recipes.cache.PathChildrenCacheListener; @@ -51,9 +52,8 @@ public class ZkWorker implements Closeable private final Function cacheConverter; private AtomicReference worker; - private AtomicReference lastCompletedTaskTime = new AtomicReference<>(new DateTime()); + private AtomicReference lastCompletedTaskTime = new AtomicReference<>(DateTimes.nowUtc()); private AtomicReference blacklistedUntil = new AtomicReference<>(); - private AtomicInteger continuouslyFailedTasksCount = new AtomicInteger(0); public ZkWorker(Worker worker, PathChildrenCache statusCache, final ObjectMapper jsonMapper) diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/AbstractWorkerProvisioningStrategy.java b/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/AbstractWorkerProvisioningStrategy.java index 89915853e37..ce53188416b 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/AbstractWorkerProvisioningStrategy.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/AbstractWorkerProvisioningStrategy.java @@ -22,8 +22,8 @@ package io.druid.indexing.overlord.autoscaling; import com.google.common.base.Supplier; import com.metamx.emitter.EmittingLogger; import io.druid.indexing.overlord.WorkerTaskRunner; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.granularity.PeriodGranularity; -import org.joda.time.DateTime; import org.joda.time.Duration; import org.joda.time.Period; @@ -91,7 +91,7 @@ public abstract class AbstractWorkerProvisioningStrategy implements Provisioning provisioningSchedulerConfig.getOriginTime(), null ); - final long startTime = granularity.bucketEnd(new DateTime()).getMillis(); + final long startTime = granularity.bucketEnd(DateTimes.nowUtc()).getMillis(); exec.scheduleAtFixedRate( new Runnable() diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/PendingTaskBasedWorkerProvisioningStrategy.java b/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/PendingTaskBasedWorkerProvisioningStrategy.java index 8663e8c7fd9..9fc84e38ae0 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/PendingTaskBasedWorkerProvisioningStrategy.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/PendingTaskBasedWorkerProvisioningStrategy.java @@ -40,6 +40,7 @@ import io.druid.indexing.overlord.config.WorkerTaskRunnerConfig; import io.druid.indexing.overlord.setup.WorkerBehaviorConfig; import io.druid.indexing.overlord.setup.WorkerSelectStrategy; import io.druid.indexing.worker.Worker; +import io.druid.java.util.common.DateTimes; import org.joda.time.DateTime; import org.joda.time.Duration; @@ -109,8 +110,8 @@ public class PendingTaskBasedWorkerProvisioningStrategy extends AbstractWorkerPr private final Set currentlyProvisioning = Sets.newHashSet(); private final Set currentlyTerminating = Sets.newHashSet(); - private DateTime lastProvisionTime = new DateTime(); - private DateTime lastTerminateTime = new DateTime(); + private DateTime lastProvisionTime = DateTimes.nowUtc(); + private DateTime lastTerminateTime = lastProvisionTime; private PendingProvisioner(WorkerTaskRunner runner) { @@ -159,14 +160,14 @@ public class PendingTaskBasedWorkerProvisioningStrategy extends AbstractWorkerPr break; } else { currentlyProvisioning.addAll(newNodes); - lastProvisionTime = new DateTime(); + lastProvisionTime = DateTimes.nowUtc(); scalingStats.addProvisionEvent(provisioned); 
want -= provisioned.getNodeIds().size(); didProvision = true; } } } else { - Duration durSinceLastProvision = new Duration(lastProvisionTime, new DateTime()); + Duration durSinceLastProvision = new Duration(lastProvisionTime, DateTimes.nowUtc()); log.info("%s provisioning. Current wait time: %s", currentlyProvisioning, durSinceLastProvision); if (durSinceLastProvision.isLongerThan(config.getMaxScalingDuration().toStandardDuration())) { log.makeAlert("Worker node provisioning taking too long!") @@ -329,13 +330,13 @@ public class PendingTaskBasedWorkerProvisioningStrategy extends AbstractWorkerPr .terminate(ImmutableList.copyOf(laziestWorkerIps)); if (terminated != null) { currentlyTerminating.addAll(terminated.getNodeIds()); - lastTerminateTime = new DateTime(); + lastTerminateTime = DateTimes.nowUtc(); scalingStats.addTerminateEvent(terminated); didTerminate = true; } } } else { - Duration durSinceLastTerminate = new Duration(lastTerminateTime, new DateTime()); + Duration durSinceLastTerminate = new Duration(lastTerminateTime, DateTimes.nowUtc()); log.info("%s terminating. Current wait time: %s", currentlyTerminating, durSinceLastTerminate); @@ -407,7 +408,7 @@ public class PendingTaskBasedWorkerProvisioningStrategy extends AbstractWorkerPr task.getId() ) ), - DateTime.now() + DateTimes.nowUtc() ); } @@ -418,7 +419,7 @@ public class PendingTaskBasedWorkerProvisioningStrategy extends AbstractWorkerPr 0, Sets.newHashSet(), Sets.newHashSet(), - DateTime.now() + DateTimes.nowUtc() ); } } diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/ProvisioningSchedulerConfig.java b/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/ProvisioningSchedulerConfig.java index 4c7c9b03692..3bff13e999a 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/ProvisioningSchedulerConfig.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/ProvisioningSchedulerConfig.java @@ -20,6 +20,7 @@ package io.druid.indexing.overlord.autoscaling; import com.fasterxml.jackson.annotation.JsonProperty; +import io.druid.java.util.common.DateTimes; import org.joda.time.DateTime; import org.joda.time.Period; @@ -37,7 +38,7 @@ public class ProvisioningSchedulerConfig private Period terminatePeriod = new Period("PT5M"); @JsonProperty - private DateTime originTime = new DateTime("2012-01-01T00:55:00.000Z"); + private DateTime originTime = DateTimes.of("2012-01-01T00:55:00.000Z"); public boolean isDoAutoscale() { diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/ScalingStats.java b/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/ScalingStats.java index b68482ccce9..e210c17ac5c 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/ScalingStats.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/ScalingStats.java @@ -24,6 +24,7 @@ import com.fasterxml.jackson.annotation.JsonValue; import com.google.common.collect.Lists; import com.google.common.collect.MinMaxPriorityQueue; import com.google.common.collect.Ordering; +import io.druid.java.util.common.DateTimes; import org.joda.time.DateTime; import java.util.Collections; @@ -68,26 +69,14 @@ public class ScalingStats public void addProvisionEvent(AutoScalingData data) { synchronized (lock) { - recentEvents.add( - new ScalingEvent( - data, - new DateTime(), - EVENT.PROVISION - ) - ); + recentEvents.add(new ScalingEvent(data, DateTimes.nowUtc(), EVENT.PROVISION)); } } public 
void addTerminateEvent(AutoScalingData data) { synchronized (lock) { - recentEvents.add( - new ScalingEvent( - data, - new DateTime(), - EVENT.TERMINATE - ) - ); + recentEvents.add(new ScalingEvent(data, DateTimes.nowUtc(), EVENT.TERMINATE)); } } diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/SimpleWorkerProvisioningStrategy.java b/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/SimpleWorkerProvisioningStrategy.java index 9c2da6e1b37..d5a79c18dff 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/SimpleWorkerProvisioningStrategy.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/SimpleWorkerProvisioningStrategy.java @@ -36,6 +36,7 @@ import io.druid.indexing.overlord.TaskRunnerWorkItem; import io.druid.indexing.overlord.WorkerTaskRunner; import io.druid.indexing.overlord.setup.WorkerBehaviorConfig; import io.druid.indexing.worker.Worker; +import io.druid.java.util.common.DateTimes; import org.joda.time.DateTime; import org.joda.time.Duration; @@ -103,8 +104,8 @@ public class SimpleWorkerProvisioningStrategy extends AbstractWorkerProvisioning private final Set currentlyTerminating = Sets.newHashSet(); private int targetWorkerCount = -1; - private DateTime lastProvisionTime = new DateTime(); - private DateTime lastTerminateTime = new DateTime(); + private DateTime lastProvisionTime = DateTimes.nowUtc(); + private DateTime lastTerminateTime = lastProvisionTime; SimpleProvisioner(WorkerTaskRunner runner) { @@ -154,7 +155,7 @@ public class SimpleWorkerProvisioningStrategy extends AbstractWorkerProvisioning break; } else { currentlyProvisioning.addAll(newNodes); - lastProvisionTime = new DateTime(); + lastProvisionTime = DateTimes.nowUtc(); scalingStats.addProvisionEvent(provisioned); want -= provisioned.getNodeIds().size(); didProvision = true; @@ -162,7 +163,7 @@ public class SimpleWorkerProvisioningStrategy extends AbstractWorkerProvisioning } if (!currentlyProvisioning.isEmpty()) { - Duration durSinceLastProvision = new Duration(lastProvisionTime, new DateTime()); + Duration durSinceLastProvision = new Duration(lastProvisionTime, DateTimes.nowUtc()); log.info("%s provisioning. Current wait time: %s", currentlyProvisioning, durSinceLastProvision); if (durSinceLastProvision.isLongerThan(config.getMaxScalingDuration().toStandardDuration())) { log.makeAlert("Worker node provisioning taking too long!") @@ -250,14 +251,14 @@ public class SimpleWorkerProvisioningStrategy extends AbstractWorkerProvisioning .terminate(ImmutableList.copyOf(laziestWorkerIps)); if (terminated != null) { currentlyTerminating.addAll(terminated.getNodeIds()); - lastTerminateTime = new DateTime(); + lastTerminateTime = DateTimes.nowUtc(); scalingStats.addTerminateEvent(terminated); didTerminate = true; } } } } else { - Duration durSinceLastTerminate = new Duration(lastTerminateTime, new DateTime()); + Duration durSinceLastTerminate = new Duration(lastTerminateTime, DateTimes.nowUtc()); log.info("%s terminating. 
Current wait time: %s", currentlyTerminating, durSinceLastTerminate); diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/http/OverlordResource.java b/indexing-service/src/main/java/io/druid/indexing/overlord/http/OverlordResource.java index 4b25c540b63..f98df52c743 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/http/OverlordResource.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/http/OverlordResource.java @@ -52,6 +52,8 @@ import io.druid.indexing.overlord.WorkerTaskRunner; import io.druid.indexing.overlord.autoscaling.ScalingStats; import io.druid.indexing.overlord.http.security.TaskResourceFilter; import io.druid.indexing.overlord.setup.WorkerBehaviorConfig; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.Pair; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.logger.Logger; @@ -291,7 +293,7 @@ public class OverlordResource @QueryParam("count") final Integer count ) { - Interval theInterval = interval == null ? null : new Interval(interval); + Interval theInterval = interval == null ? null : Intervals.of(interval); if (theInterval == null && count != null) { try { return Response.ok( @@ -420,8 +422,8 @@ public class OverlordResource new TaskRunnerWorkItem( task.getId(), SettableFuture.create(), - new DateTime(0), - new DateTime(0) + DateTimes.EPOCH, + DateTimes.EPOCH ) { @Override @@ -539,8 +541,8 @@ public class OverlordResource // Would be nice to include the real created date, but the TaskStorage API doesn't yet allow it. return new TaskResponseObject( taskStatus.getId(), - new DateTime(0), - new DateTime(0), + DateTimes.EPOCH, + DateTimes.EPOCH, Optional.of(taskStatus), TaskLocation.unknown() ); diff --git a/indexing-service/src/main/java/io/druid/indexing/worker/WorkerCuratorCoordinator.java b/indexing-service/src/main/java/io/druid/indexing/worker/WorkerCuratorCoordinator.java index 80956556aee..f2b370f9cb7 100644 --- a/indexing-service/src/main/java/io/druid/indexing/worker/WorkerCuratorCoordinator.java +++ b/indexing-service/src/main/java/io/druid/indexing/worker/WorkerCuratorCoordinator.java @@ -25,10 +25,10 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.google.common.util.concurrent.MoreExecutors; import com.google.inject.Inject; - import io.druid.curator.CuratorUtils; import io.druid.curator.announcement.Announcer; import io.druid.indexing.overlord.config.RemoteTaskRunnerConfig; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.ISE; import io.druid.java.util.common.lifecycle.LifecycleStart; import io.druid.java.util.common.lifecycle.LifecycleStop; @@ -37,7 +37,6 @@ import io.druid.server.initialization.IndexerZkConfig; import org.apache.curator.framework.CuratorFramework; import org.apache.zookeeper.CreateMode; import org.apache.zookeeper.KeeperException; -import org.joda.time.DateTime; import java.util.Arrays; import java.util.List; @@ -98,7 +97,7 @@ public class WorkerCuratorCoordinator curatorFramework, getTaskPathForWorker(), CreateMode.PERSISTENT, - jsonMapper.writeValueAsBytes(ImmutableMap.of("created", new DateTime().toString())), + jsonMapper.writeValueAsBytes(ImmutableMap.of("created", DateTimes.nowUtc().toString())), config.getMaxZnodeBytes() ); @@ -106,7 +105,7 @@ public class WorkerCuratorCoordinator curatorFramework, getStatusPathForWorker(), CreateMode.PERSISTENT, - 
jsonMapper.writeValueAsBytes(ImmutableMap.of("created", new DateTime().toString())), + jsonMapper.writeValueAsBytes(ImmutableMap.of("created", DateTimes.nowUtc().toString())), config.getMaxZnodeBytes() ); diff --git a/indexing-service/src/main/java/io/druid/indexing/worker/executor/ExecutorLifecycle.java b/indexing-service/src/main/java/io/druid/indexing/worker/executor/ExecutorLifecycle.java index 3036f1f1850..6c83eea9bb8 100644 --- a/indexing-service/src/main/java/io/druid/indexing/worker/executor/ExecutorLifecycle.java +++ b/indexing-service/src/main/java/io/druid/indexing/worker/executor/ExecutorLifecycle.java @@ -33,12 +33,11 @@ import io.druid.indexing.common.actions.TaskActionClientFactory; import io.druid.indexing.common.config.TaskConfig; import io.druid.indexing.common.task.Task; import io.druid.indexing.overlord.TaskRunner; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.ISE; import io.druid.java.util.common.lifecycle.LifecycleStart; import io.druid.java.util.common.lifecycle.LifecycleStop; -import org.joda.time.DateTime; - import java.io.File; import java.io.IOException; import java.io.InputStream; @@ -118,7 +117,7 @@ public class ExecutorLifecycle log.info("Attempting to lock file[%s].", taskLockFile); final long startLocking = System.currentTimeMillis(); - final long timeout = new DateTime(startLocking).plus(taskConfig.getDirectoryLockTimeout()).getMillis(); + final long timeout = DateTimes.utc(startLocking).plus(taskConfig.getDirectoryLockTimeout()).getMillis(); while (taskLockFileLock == null && System.currentTimeMillis() < timeout) { taskLockFileLock = taskLockChannel.tryLock(); if (taskLockFileLock == null) { diff --git a/indexing-service/src/test/java/io/druid/indexing/appenderator/ActionBasedUsedSegmentCheckerTest.java b/indexing-service/src/test/java/io/druid/indexing/appenderator/ActionBasedUsedSegmentCheckerTest.java index e011a22b615..2d7b14541c8 100644 --- a/indexing-service/src/test/java/io/druid/indexing/appenderator/ActionBasedUsedSegmentCheckerTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/appenderator/ActionBasedUsedSegmentCheckerTest.java @@ -23,13 +23,13 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import io.druid.indexing.common.actions.SegmentListUsedAction; import io.druid.indexing.common.actions.TaskActionClient; +import io.druid.java.util.common.Intervals; import io.druid.segment.realtime.appenderator.SegmentIdentifier; import io.druid.segment.realtime.appenderator.UsedSegmentChecker; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.LinearShardSpec; -import org.junit.Assert; import org.easymock.EasyMock; -import org.joda.time.Interval; +import org.junit.Assert; import org.junit.Test; import java.io.IOException; @@ -43,19 +43,19 @@ public class ActionBasedUsedSegmentCheckerTest final TaskActionClient taskActionClient = EasyMock.createMock(TaskActionClient.class); EasyMock.expect( taskActionClient.submit( - new SegmentListUsedAction("bar", null, ImmutableList.of(new Interval("2002/P1D"))) + new SegmentListUsedAction("bar", null, ImmutableList.of(Intervals.of("2002/P1D"))) ) ).andReturn( ImmutableList.of( DataSegment.builder() .dataSource("bar") - .interval(new Interval("2002/P1D")) + .interval(Intervals.of("2002/P1D")) .shardSpec(new LinearShardSpec(0)) .version("b") .build(), DataSegment.builder() .dataSource("bar") - .interval(new Interval("2002/P1D")) + .interval(Intervals.of("2002/P1D")) .shardSpec(new LinearShardSpec(1)) 
.version("b") .build() @@ -63,31 +63,31 @@ public class ActionBasedUsedSegmentCheckerTest ); EasyMock.expect( taskActionClient.submit( - new SegmentListUsedAction("foo", null, ImmutableList.of(new Interval("2000/P1D"), new Interval("2001/P1D"))) + new SegmentListUsedAction("foo", null, ImmutableList.of(Intervals.of("2000/P1D"), Intervals.of("2001/P1D"))) ) ).andReturn( ImmutableList.of( DataSegment.builder() .dataSource("foo") - .interval(new Interval("2000/P1D")) + .interval(Intervals.of("2000/P1D")) .shardSpec(new LinearShardSpec(0)) .version("a") .build(), DataSegment.builder() .dataSource("foo") - .interval(new Interval("2000/P1D")) + .interval(Intervals.of("2000/P1D")) .shardSpec(new LinearShardSpec(1)) .version("a") .build(), DataSegment.builder() .dataSource("foo") - .interval(new Interval("2001/P1D")) + .interval(Intervals.of("2001/P1D")) .shardSpec(new LinearShardSpec(1)) .version("b") .build(), DataSegment.builder() .dataSource("foo") - .interval(new Interval("2002/P1D")) + .interval(Intervals.of("2002/P1D")) .shardSpec(new LinearShardSpec(1)) .version("b") .build() @@ -98,9 +98,9 @@ public class ActionBasedUsedSegmentCheckerTest final UsedSegmentChecker checker = new ActionBasedUsedSegmentChecker(taskActionClient); final Set segments = checker.findUsedSegments( ImmutableSet.of( - new SegmentIdentifier("foo", new Interval("2000/P1D"), "a", new LinearShardSpec(1)), - new SegmentIdentifier("foo", new Interval("2001/P1D"), "b", new LinearShardSpec(0)), - new SegmentIdentifier("bar", new Interval("2002/P1D"), "b", new LinearShardSpec(0)) + new SegmentIdentifier("foo", Intervals.of("2000/P1D"), "a", new LinearShardSpec(1)), + new SegmentIdentifier("foo", Intervals.of("2001/P1D"), "b", new LinearShardSpec(0)), + new SegmentIdentifier("bar", Intervals.of("2002/P1D"), "b", new LinearShardSpec(0)) ) ); @@ -108,13 +108,13 @@ public class ActionBasedUsedSegmentCheckerTest ImmutableSet.of( DataSegment.builder() .dataSource("foo") - .interval(new Interval("2000/P1D")) + .interval(Intervals.of("2000/P1D")) .shardSpec(new LinearShardSpec(1)) .version("a") .build(), DataSegment.builder() .dataSource("bar") - .interval(new Interval("2002/P1D")) + .interval(Intervals.of("2002/P1D")) .shardSpec(new LinearShardSpec(0)) .version("b") .build() diff --git a/indexing-service/src/test/java/io/druid/indexing/common/TaskToolboxTest.java b/indexing-service/src/test/java/io/druid/indexing/common/TaskToolboxTest.java index da3b66f3280..1542b33aef6 100644 --- a/indexing-service/src/test/java/io/druid/indexing/common/TaskToolboxTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/common/TaskToolboxTest.java @@ -28,6 +28,7 @@ import io.druid.client.cache.CacheConfig; import io.druid.indexing.common.actions.TaskActionClientFactory; import io.druid.indexing.common.config.TaskConfig; import io.druid.indexing.common.task.Task; +import io.druid.java.util.common.Intervals; import io.druid.query.QueryRunnerFactoryConglomerate; import io.druid.segment.IndexIO; import io.druid.segment.IndexMergerV9; @@ -42,7 +43,6 @@ import io.druid.server.coordination.DataSegmentAnnouncer; import io.druid.server.coordination.DataSegmentServerAnnouncer; import io.druid.timeline.DataSegment; import org.easymock.EasyMock; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Before; import org.junit.Rule; @@ -165,7 +165,7 @@ public class TaskToolboxTest .expect(mockSegmentLoaderLocalCacheManager.withConfig(EasyMock.anyObject())) .andReturn(mockSegmentLoaderLocalCacheManager).anyTimes(); 
EasyMock.replay(mockSegmentLoaderLocalCacheManager); - DataSegment dataSegment = DataSegment.builder().dataSource("source").interval(new Interval("2012-01-01/P1D")).version("1").size(1).build(); + DataSegment dataSegment = DataSegment.builder().dataSource("source").interval(Intervals.of("2012-01-01/P1D")).version("1").size(1).build(); List segments = ImmutableList.of ( dataSegment diff --git a/indexing-service/src/test/java/io/druid/indexing/common/actions/RemoteTaskActionClientTest.java b/indexing-service/src/test/java/io/druid/indexing/common/actions/RemoteTaskActionClientTest.java index 50d75608631..c07d444dde9 100644 --- a/indexing-service/src/test/java/io/druid/indexing/common/actions/RemoteTaskActionClientTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/common/actions/RemoteTaskActionClientTest.java @@ -34,9 +34,9 @@ import io.druid.indexing.common.TaskLock; import io.druid.indexing.common.task.NoopTask; import io.druid.indexing.common.task.Task; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Intervals; import org.easymock.EasyMock; import org.jboss.netty.handler.codec.http.HttpResponseStatus; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Before; import org.junit.Test; @@ -100,7 +100,7 @@ public class RemoteTaskActionClientTest result = Collections.singletonList(new TaskLock( "groupId", "dataSource", - new Interval(now - 30 * 1000, now), + Intervals.utc(now - 30 * 1000, now), "version" )); } diff --git a/indexing-service/src/test/java/io/druid/indexing/common/actions/SegmentAllocateActionTest.java b/indexing-service/src/test/java/io/druid/indexing/common/actions/SegmentAllocateActionTest.java index a587b38e1c1..888ae59dc90 100644 --- a/indexing-service/src/test/java/io/druid/indexing/common/actions/SegmentAllocateActionTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/common/actions/SegmentAllocateActionTest.java @@ -31,6 +31,7 @@ import io.druid.indexing.common.TaskLock; import io.druid.indexing.common.task.NoopTask; import io.druid.indexing.common.task.Task; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.ISE; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.granularity.Granularity; @@ -56,8 +57,8 @@ public class SegmentAllocateActionTest public TaskActionTestKit taskActionTestKit = new TaskActionTestKit(); private static final String DATA_SOURCE = "none"; - private static final DateTime PARTY_TIME = new DateTime("1999"); - private static final DateTime THE_DISTANT_FUTURE = new DateTime("3000"); + private static final DateTime PARTY_TIME = DateTimes.of("1999"); + private static final DateTime THE_DISTANT_FUTURE = DateTimes.of("3000"); @Before public void setUp() diff --git a/indexing-service/src/test/java/io/druid/indexing/common/actions/SegmentInsertActionTest.java b/indexing-service/src/test/java/io/druid/indexing/common/actions/SegmentInsertActionTest.java index 5cac6ce6a65..1dd84feb265 100644 --- a/indexing-service/src/test/java/io/druid/indexing/common/actions/SegmentInsertActionTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/common/actions/SegmentInsertActionTest.java @@ -24,6 +24,7 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import io.druid.indexing.common.task.NoopTask; import io.druid.indexing.common.task.Task; +import io.druid.java.util.common.Intervals; import io.druid.timeline.DataSegment; 
import io.druid.timeline.partition.LinearShardSpec; import org.hamcrest.CoreMatchers; @@ -44,7 +45,7 @@ public class SegmentInsertActionTest public TaskActionTestKit actionTestKit = new TaskActionTestKit(); private static final String DATA_SOURCE = "none"; - private static final Interval INTERVAL = new Interval("2020/2020T01"); + private static final Interval INTERVAL = Intervals.of("2020/2020T01"); private static final String PARTY_YEAR = "1999"; private static final String THE_DISTANT_FUTURE = "3000"; @@ -90,7 +91,7 @@ public class SegmentInsertActionTest final Task task = new NoopTask(null, 0, 0, null, null, null); final SegmentInsertAction action = new SegmentInsertAction(ImmutableSet.of(SEGMENT1, SEGMENT2)); actionTestKit.getTaskLockbox().add(task); - actionTestKit.getTaskLockbox().lock(task, new Interval(INTERVAL), 5000); + actionTestKit.getTaskLockbox().lock(task, INTERVAL, 5000); action.perform(task, actionTestKit.getTaskActionToolbox()); Assert.assertEquals( @@ -108,7 +109,7 @@ public class SegmentInsertActionTest final Task task = new NoopTask(null, 0, 0, null, null, null); final SegmentInsertAction action = new SegmentInsertAction(ImmutableSet.of(SEGMENT3)); actionTestKit.getTaskLockbox().add(task); - actionTestKit.getTaskLockbox().lock(task, new Interval(INTERVAL), 5000); + actionTestKit.getTaskLockbox().lock(task, INTERVAL, 5000); thrown.expect(IllegalStateException.class); thrown.expectMessage(CoreMatchers.startsWith("Segments not covered by locks for task")); diff --git a/indexing-service/src/test/java/io/druid/indexing/common/actions/SegmentListUsedActionTest.java b/indexing-service/src/test/java/io/druid/indexing/common/actions/SegmentListUsedActionTest.java index e22f600253b..6ae7c181f3b 100644 --- a/indexing-service/src/test/java/io/druid/indexing/common/actions/SegmentListUsedActionTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/common/actions/SegmentListUsedActionTest.java @@ -22,6 +22,7 @@ package io.druid.indexing.common.actions; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; import io.druid.TestUtil; +import io.druid.java.util.common.Intervals; import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; @@ -37,7 +38,7 @@ public class SegmentListUsedActionTest @Test public void testSingleIntervalSerde() throws Exception { - Interval interval = Interval.parse("2014/2015"); + Interval interval = Intervals.of("2014/2015"); SegmentListUsedAction expected = new SegmentListUsedAction( "dataSource", @@ -53,7 +54,7 @@ public class SegmentListUsedActionTest @Test public void testMultiIntervalSerde() throws Exception { - List intervals = ImmutableList.of(Interval.parse("2014/2015"), Interval.parse("2016/2017")); + List intervals = ImmutableList.of(Intervals.of("2014/2015"), Intervals.of("2016/2017")); SegmentListUsedAction expected = new SegmentListUsedAction( "dataSource", null, @@ -71,6 +72,6 @@ public class SegmentListUsedActionTest String jsonStr = "{\"type\": \"segmentListUsed\", \"dataSource\": \"test\", \"interval\": \"2014/2015\"}"; SegmentListUsedAction actual = (SegmentListUsedAction) MAPPER.readValue(jsonStr, TaskAction.class); - Assert.assertEquals(new SegmentListUsedAction("test", Interval.parse("2014/2015"), null), actual); + Assert.assertEquals(new SegmentListUsedAction("test", Intervals.of("2014/2015"), null), actual); } } diff --git a/indexing-service/src/test/java/io/druid/indexing/common/actions/SegmentTransactionalInsertActionTest.java 
b/indexing-service/src/test/java/io/druid/indexing/common/actions/SegmentTransactionalInsertActionTest.java index c88d1db02ff..359fd762a57 100644 --- a/indexing-service/src/test/java/io/druid/indexing/common/actions/SegmentTransactionalInsertActionTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/common/actions/SegmentTransactionalInsertActionTest.java @@ -26,6 +26,7 @@ import io.druid.indexing.common.task.NoopTask; import io.druid.indexing.common.task.Task; import io.druid.indexing.overlord.ObjectMetadata; import io.druid.indexing.overlord.SegmentPublishResult; +import io.druid.java.util.common.Intervals; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.LinearShardSpec; import org.hamcrest.CoreMatchers; @@ -44,7 +45,7 @@ public class SegmentTransactionalInsertActionTest public TaskActionTestKit actionTestKit = new TaskActionTestKit(); private static final String DATA_SOURCE = "none"; - private static final Interval INTERVAL = new Interval("2020/2020T01"); + private static final Interval INTERVAL = Intervals.of("2020/2020T01"); private static final String PARTY_YEAR = "1999"; private static final String THE_DISTANT_FUTURE = "3000"; @@ -89,7 +90,7 @@ public class SegmentTransactionalInsertActionTest { final Task task = new NoopTask(null, 0, 0, null, null, null); actionTestKit.getTaskLockbox().add(task); - actionTestKit.getTaskLockbox().lock(task, new Interval(INTERVAL), 5000); + actionTestKit.getTaskLockbox().lock(task, INTERVAL, 5000); SegmentPublishResult result1 = new SegmentTransactionalInsertAction( ImmutableSet.of(SEGMENT1), @@ -130,7 +131,7 @@ public class SegmentTransactionalInsertActionTest { final Task task = new NoopTask(null, 0, 0, null, null, null); actionTestKit.getTaskLockbox().add(task); - actionTestKit.getTaskLockbox().lock(task, new Interval(INTERVAL), 5000); + actionTestKit.getTaskLockbox().lock(task, INTERVAL, 5000); SegmentPublishResult result = new SegmentTransactionalInsertAction( ImmutableSet.of(SEGMENT1), @@ -150,7 +151,7 @@ public class SegmentTransactionalInsertActionTest final Task task = new NoopTask(null, 0, 0, null, null, null); final SegmentTransactionalInsertAction action = new SegmentTransactionalInsertAction(ImmutableSet.of(SEGMENT3)); actionTestKit.getTaskLockbox().add(task); - actionTestKit.getTaskLockbox().lock(task, new Interval(INTERVAL), 5000); + actionTestKit.getTaskLockbox().lock(task, INTERVAL, 5000); thrown.expect(IllegalStateException.class); thrown.expectMessage(CoreMatchers.startsWith("Segments not covered by locks for task")); diff --git a/indexing-service/src/test/java/io/druid/indexing/common/task/ConvertSegmentTaskTest.java b/indexing-service/src/test/java/io/druid/indexing/common/task/ConvertSegmentTaskTest.java index 74b07627fa2..a512da94117 100644 --- a/indexing-service/src/test/java/io/druid/indexing/common/task/ConvertSegmentTaskTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/common/task/ConvertSegmentTaskTest.java @@ -23,6 +23,8 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import io.druid.indexing.common.TestUtils; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; import org.joda.time.DateTime; @@ -46,7 +48,8 @@ public class ConvertSegmentTaskTest public void testSerializationSimple() throws Exception { final String dataSource = "billy"; - final 
Interval interval = new Interval(new DateTime().minus(1000), new DateTime()); + DateTime start = DateTimes.nowUtc(); + final Interval interval = new Interval(start.minus(1000), start); ConvertSegmentTask task = ConvertSegmentTask.create(dataSource, interval, null, false, true, null); @@ -56,7 +59,7 @@ public class ConvertSegmentTaskTest DataSegment segment = new DataSegment( dataSource, interval, - new DateTime().toString(), + DateTimes.nowUtc().toString(), ImmutableMap.of(), ImmutableList.of(), ImmutableList.of(), @@ -81,7 +84,7 @@ public class ConvertSegmentTaskTest + "}"; ConvertSegmentTask task = (ConvertSegmentTask) jsonMapper.readValue(json, Task.class); Assert.assertEquals("billy", task.getDataSource()); - Assert.assertEquals(new Interval("2015-08-27T00:00:00.000Z/2015-08-28T00:00:00.000Z"), task.getInterval()); + Assert.assertEquals(Intervals.of("2015-08-27T00:00:00.000Z/2015-08-28T00:00:00.000Z"), task.getInterval()); } @Test @@ -94,6 +97,6 @@ public class ConvertSegmentTaskTest + "}"; ConvertSegmentTask task = (ConvertSegmentTask) jsonMapper.readValue(json, Task.class); Assert.assertEquals("billy", task.getDataSource()); - Assert.assertEquals(new Interval("2015-08-27T00:00:00.000Z/2015-08-28T00:00:00.000Z"), task.getInterval()); + Assert.assertEquals(Intervals.of("2015-08-27T00:00:00.000Z/2015-08-28T00:00:00.000Z"), task.getInterval()); } } diff --git a/indexing-service/src/test/java/io/druid/indexing/common/task/HadoopConverterTaskSerDeTest.java b/indexing-service/src/test/java/io/druid/indexing/common/task/HadoopConverterTaskSerDeTest.java index c120845eb1b..be8885818c0 100644 --- a/indexing-service/src/test/java/io/druid/indexing/common/task/HadoopConverterTaskSerDeTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/common/task/HadoopConverterTaskSerDeTest.java @@ -23,6 +23,7 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import io.druid.indexing.common.TestUtils; +import io.druid.java.util.common.Intervals; import io.druid.segment.IndexSpec; import io.druid.segment.data.CompressedObjectStrategy; import io.druid.segment.data.CompressionFactory; @@ -44,7 +45,7 @@ public class HadoopConverterTaskSerDeTest private static final String TASK_ID = "task id"; private static final String DATA_SOURCE = "datasource"; - private static final Interval INTERVAL = Interval.parse("2010/2011"); + private static final Interval INTERVAL = Intervals.of("2010/2011"); private static final String SEGMENT_VERSION = "some version"; private static final Map LOAD_SPEC = ImmutableMap.of("someKey", "someVal"); private static final List DIMENSIONS = ImmutableList.of("dim1", "dim2"); diff --git a/indexing-service/src/test/java/io/druid/indexing/common/task/IndexTaskTest.java b/indexing-service/src/test/java/io/druid/indexing/common/task/IndexTaskTest.java index fd2adc740a5..0091c40b15d 100644 --- a/indexing-service/src/test/java/io/druid/indexing/common/task/IndexTaskTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/common/task/IndexTaskTest.java @@ -41,6 +41,8 @@ import io.druid.indexing.common.actions.TaskActionClient; import io.druid.indexing.common.task.IndexTask.IndexIngestionSpec; import io.druid.indexing.common.task.IndexTask.IndexTuningConfig; import io.druid.indexing.overlord.SegmentPublishResult; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.StringUtils; import 
io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.parsers.ParseException; @@ -61,7 +63,6 @@ import io.druid.timeline.partition.HashBasedNumberedShardSpec; import io.druid.timeline.partition.NoneShardSpec; import io.druid.timeline.partition.NumberedShardSpec; import io.druid.timeline.partition.ShardSpec; -import org.joda.time.DateTime; import org.joda.time.Interval; import org.junit.Assert; import org.junit.Rule; @@ -151,13 +152,13 @@ public class IndexTaskTest Assert.assertEquals(2, segments.size()); Assert.assertEquals("test", segments.get(0).getDataSource()); - Assert.assertEquals(new Interval("2014/P1D"), segments.get(0).getInterval()); + Assert.assertEquals(Intervals.of("2014/P1D"), segments.get(0).getInterval()); Assert.assertEquals(HashBasedNumberedShardSpec.class, segments.get(0).getShardSpec().getClass()); Assert.assertEquals(0, segments.get(0).getShardSpec().getPartitionNum()); Assert.assertEquals(2, ((NumberedShardSpec) segments.get(0).getShardSpec()).getPartitions()); Assert.assertEquals("test", segments.get(1).getDataSource()); - Assert.assertEquals(new Interval("2014/P1D"), segments.get(1).getInterval()); + Assert.assertEquals(Intervals.of("2014/P1D"), segments.get(1).getInterval()); Assert.assertEquals(HashBasedNumberedShardSpec.class, segments.get(1).getShardSpec().getClass()); Assert.assertEquals(1, segments.get(1).getShardSpec().getPartitionNum()); Assert.assertEquals(2, ((NumberedShardSpec) segments.get(1).getShardSpec()).getPartitions()); @@ -196,12 +197,12 @@ public class IndexTaskTest Assert.assertEquals(2, segments.size()); Assert.assertEquals("test", segments.get(0).getDataSource()); - Assert.assertEquals(new Interval("2014/P1D"), segments.get(0).getInterval()); + Assert.assertEquals(Intervals.of("2014/P1D"), segments.get(0).getInterval()); Assert.assertEquals(NumberedShardSpec.class, segments.get(0).getShardSpec().getClass()); Assert.assertEquals(0, segments.get(0).getShardSpec().getPartitionNum()); Assert.assertEquals("test", segments.get(1).getDataSource()); - Assert.assertEquals(new Interval("2014/P1D"), segments.get(1).getInterval()); + Assert.assertEquals(Intervals.of("2014/P1D"), segments.get(1).getInterval()); Assert.assertEquals(NumberedShardSpec.class, segments.get(1).getShardSpec().getClass()); Assert.assertEquals(1, segments.get(1).getShardSpec().getPartitionNum()); } @@ -227,7 +228,7 @@ public class IndexTaskTest null, new ArbitraryGranularitySpec( Granularities.MINUTE, - Collections.singletonList(new Interval("2014/2015")) + Collections.singletonList(Intervals.of("2014/2015")) ), createTuningConfig(10, null, false, true), false @@ -261,7 +262,7 @@ public class IndexTaskTest new UniformGranularitySpec( Granularities.HOUR, Granularities.HOUR, - Collections.singletonList(new Interval("2015-03-01T08:00:00Z/2015-03-01T09:00:00Z")) + Collections.singletonList(Intervals.of("2015-03-01T08:00:00Z/2015-03-01T09:00:00Z")) ), createTuningConfig(50, null, false, true), false @@ -304,7 +305,7 @@ public class IndexTaskTest Assert.assertEquals(1, segments.size()); Assert.assertEquals("test", segments.get(0).getDataSource()); - Assert.assertEquals(new Interval("2014/P1D"), segments.get(0).getInterval()); + Assert.assertEquals(Intervals.of("2014/P1D"), segments.get(0).getInterval()); Assert.assertTrue(segments.get(0).getShardSpec().getClass().equals(NoneShardSpec.class)); Assert.assertEquals(0, segments.get(0).getShardSpec().getPartitionNum()); } @@ -343,12 +344,12 @@ public class IndexTaskTest Assert.assertEquals(2, segments.size()); 
Assert.assertEquals("test", segments.get(0).getDataSource()); - Assert.assertEquals(new Interval("2014/P1D"), segments.get(0).getInterval()); + Assert.assertEquals(Intervals.of("2014/P1D"), segments.get(0).getInterval()); Assert.assertTrue(segments.get(0).getShardSpec().getClass().equals(NumberedShardSpec.class)); Assert.assertEquals(0, segments.get(0).getShardSpec().getPartitionNum()); Assert.assertEquals("test", segments.get(1).getDataSource()); - Assert.assertEquals(new Interval("2014/P1D"), segments.get(1).getInterval()); + Assert.assertEquals(Intervals.of("2014/P1D"), segments.get(1).getInterval()); Assert.assertTrue(segments.get(1).getShardSpec().getClass().equals(NumberedShardSpec.class)); Assert.assertEquals(1, segments.get(1).getShardSpec().getPartitionNum()); } @@ -387,17 +388,17 @@ public class IndexTaskTest Assert.assertEquals(3, segments.size()); Assert.assertEquals("test", segments.get(0).getDataSource()); - Assert.assertEquals(new Interval("2014-01-01T00/PT1H"), segments.get(0).getInterval()); + Assert.assertEquals(Intervals.of("2014-01-01T00/PT1H"), segments.get(0).getInterval()); Assert.assertTrue(segments.get(0).getShardSpec().getClass().equals(NoneShardSpec.class)); Assert.assertEquals(0, segments.get(0).getShardSpec().getPartitionNum()); Assert.assertEquals("test", segments.get(1).getDataSource()); - Assert.assertEquals(new Interval("2014-01-01T01/PT1H"), segments.get(1).getInterval()); + Assert.assertEquals(Intervals.of("2014-01-01T01/PT1H"), segments.get(1).getInterval()); Assert.assertTrue(segments.get(1).getShardSpec().getClass().equals(NoneShardSpec.class)); Assert.assertEquals(0, segments.get(1).getShardSpec().getPartitionNum()); Assert.assertEquals("test", segments.get(2).getDataSource()); - Assert.assertEquals(new Interval("2014-01-01T02/PT1H"), segments.get(2).getInterval()); + Assert.assertEquals(Intervals.of("2014-01-01T02/PT1H"), segments.get(2).getInterval()); Assert.assertTrue(segments.get(2).getShardSpec().getClass().equals(NoneShardSpec.class)); Assert.assertEquals(0, segments.get(2).getShardSpec().getPartitionNum()); } @@ -448,7 +449,7 @@ public class IndexTaskTest Assert.assertEquals(Arrays.asList("d"), segments.get(0).getDimensions()); Assert.assertEquals(Arrays.asList("val"), segments.get(0).getMetrics()); - Assert.assertEquals(new Interval("2014/P1D"), segments.get(0).getInterval()); + Assert.assertEquals(Intervals.of("2014/P1D"), segments.get(0).getInterval()); } @Test @@ -497,7 +498,7 @@ public class IndexTaskTest Assert.assertEquals(Arrays.asList("d"), segments.get(0).getDimensions()); Assert.assertEquals(Arrays.asList("val"), segments.get(0).getMetrics()); - Assert.assertEquals(new Interval("2014/P1D"), segments.get(0).getInterval()); + Assert.assertEquals(Intervals.of("2014/P1D"), segments.get(0).getInterval()); } @Test @@ -541,7 +542,7 @@ public class IndexTaskTest for (int i = 0; i < 6; i++) { final DataSegment segment = segments.get(i); - final Interval expectedInterval = new Interval(StringUtils.format("2014-01-01T0%d/PT1H", (i / 2))); + final Interval expectedInterval = Intervals.of(StringUtils.format("2014-01-01T0%d/PT1H", (i / 2))); final int expectedPartitionNum = i % 2; Assert.assertEquals("test", segment.getDataSource()); @@ -583,7 +584,7 @@ public class IndexTaskTest for (int i = 0; i < 3; i++) { final DataSegment segment = segments.get(i); - final Interval expectedInterval = new Interval("2014-01-01T00:00:00.000Z/2014-01-02T00:00:00.000Z"); + final Interval expectedInterval = 
Intervals.of("2014-01-01T00:00:00.000Z/2014-01-02T00:00:00.000Z"); Assert.assertEquals("test", segment.getDataSource()); Assert.assertEquals(expectedInterval, segment.getInterval()); @@ -624,7 +625,7 @@ public class IndexTaskTest for (int i = 0; i < 5; i++) { final DataSegment segment = segments.get(i); - final Interval expectedInterval = new Interval("2014-01-01T00:00:00.000Z/2014-01-02T00:00:00.000Z"); + final Interval expectedInterval = Intervals.of("2014-01-01T00:00:00.000Z/2014-01-02T00:00:00.000Z"); Assert.assertEquals("test", segment.getDataSource()); Assert.assertEquals(expectedInterval, segment.getInterval()); @@ -697,7 +698,7 @@ public class IndexTaskTest Assert.assertEquals(Arrays.asList("d"), segments.get(0).getDimensions()); Assert.assertEquals(Arrays.asList("val"), segments.get(0).getMetrics()); - Assert.assertEquals(new Interval("2014/P1D"), segments.get(0).getInterval()); + Assert.assertEquals(Intervals.of("2014/P1D"), segments.get(0).getInterval()); } @Test @@ -826,7 +827,7 @@ public class IndexTaskTest ); Assert.assertEquals(Arrays.asList("val"), segment.getMetrics()); - Assert.assertEquals(new Interval("2014/P1D"), segment.getInterval()); + Assert.assertEquals(Intervals.of("2014/P1D"), segment.getInterval()); } } @@ -892,7 +893,7 @@ public class IndexTaskTest if (taskAction instanceof LockListAction) { return (RetType) Collections.singletonList( new TaskLock( - "", "", null, new DateTime().toString() + "", "", null, DateTimes.nowUtc().toString() ) ); } @@ -902,7 +903,7 @@ public class IndexTaskTest "groupId", "test", ((LockAcquireAction) taskAction).getInterval(), - new DateTime().toString() + DateTimes.nowUtc().toString() ); } @@ -983,7 +984,7 @@ public class IndexTaskTest granularitySpec != null ? granularitySpec : new UniformGranularitySpec( Granularities.DAY, Granularities.MINUTE, - Arrays.asList(new Interval("2014/2015")) + Arrays.asList(Intervals.of("2014/2015")) ), jsonMapper ), diff --git a/indexing-service/src/test/java/io/druid/indexing/common/task/MergeTaskBaseTest.java b/indexing-service/src/test/java/io/druid/indexing/common/task/MergeTaskBaseTest.java index bb3f982d8fd..0a61e0bdbbd 100644 --- a/indexing-service/src/test/java/io/druid/indexing/common/task/MergeTaskBaseTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/common/task/MergeTaskBaseTest.java @@ -23,8 +23,8 @@ import com.google.common.base.Charsets; import com.google.common.collect.ImmutableList; import com.google.common.hash.Hashing; import io.druid.indexing.common.TaskToolbox; +import io.druid.java.util.common.Intervals; import io.druid.timeline.DataSegment; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; @@ -39,9 +39,9 @@ public class MergeTaskBaseTest .version("V1"); final List segments = ImmutableList.builder() - .add(segmentBuilder.interval(new Interval("2012-01-04/2012-01-06")).build()) - .add(segmentBuilder.interval(new Interval("2012-01-05/2012-01-07")).build()) - .add(segmentBuilder.interval(new Interval("2012-01-03/2012-01-05")).build()) + .add(segmentBuilder.interval(Intervals.of("2012-01-04/2012-01-06")).build()) + .add(segmentBuilder.interval(Intervals.of("2012-01-05/2012-01-07")).build()) + .add(segmentBuilder.interval(Intervals.of("2012-01-03/2012-01-05")).build()) .build(); final MergeTaskBase testMergeTaskBase = new MergeTaskBase(null, "foo", segments, null) @@ -68,7 +68,7 @@ public class MergeTaskBaseTest @Test public void testInterval() { - Assert.assertEquals(new Interval("2012-01-03/2012-01-07"), testMergeTaskBase.getInterval()); + 
Assert.assertEquals(Intervals.of("2012-01-03/2012-01-07"), testMergeTaskBase.getInterval()); } @Test diff --git a/indexing-service/src/test/java/io/druid/indexing/common/task/RealtimeIndexTaskTest.java b/indexing-service/src/test/java/io/druid/indexing/common/task/RealtimeIndexTaskTest.java index c6d954420fc..c40d8e30ec9 100644 --- a/indexing-service/src/test/java/io/druid/indexing/common/task/RealtimeIndexTaskTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/common/task/RealtimeIndexTaskTest.java @@ -69,6 +69,7 @@ import io.druid.indexing.test.TestDataSegmentKiller; import io.druid.indexing.test.TestDataSegmentPusher; import io.druid.indexing.test.TestIndexerMetadataStorageCoordinator; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.ISE; import io.druid.java.util.common.Pair; import io.druid.java.util.common.StringUtils; @@ -246,7 +247,7 @@ public class RealtimeIndexTaskTest EmittingLogger.registerEmitter(emitter); emitter.start(); taskExec = MoreExecutors.listeningDecorator(Execs.singleThreaded("realtime-index-task-test-%d")); - now = new DateTime(); + now = DateTimes.nowUtc(); } @After @@ -260,7 +261,7 @@ public class RealtimeIndexTaskTest { Assert.assertEquals( "index_realtime_test_0_2015-01-02T00:00:00.000Z_abcdefgh", - RealtimeIndexTask.makeTaskId("test", 0, new DateTime("2015-01-02"), 0x76543210) + RealtimeIndexTask.makeTaskId("test", 0, DateTimes.of("2015-01-02"), 0x76543210) ); } diff --git a/indexing-service/src/test/java/io/druid/indexing/common/task/SameIntervalMergeTaskTest.java b/indexing-service/src/test/java/io/druid/indexing/common/task/SameIntervalMergeTaskTest.java index 265e66a177c..6e68e8f9952 100644 --- a/indexing-service/src/test/java/io/druid/indexing/common/task/SameIntervalMergeTaskTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/common/task/SameIntervalMergeTaskTest.java @@ -31,6 +31,7 @@ import io.druid.indexing.common.actions.SegmentInsertAction; import io.druid.indexing.common.actions.SegmentListUsedAction; import io.druid.indexing.common.actions.TaskAction; import io.druid.indexing.common.actions.TaskActionClient; +import io.druid.java.util.common.Intervals; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.CountAggregatorFactory; import io.druid.segment.IndexIO; @@ -45,7 +46,6 @@ import io.druid.timeline.DataSegment; import io.druid.timeline.partition.LinearShardSpec; import io.druid.timeline.partition.NoneShardSpec; import org.easymock.EasyMock; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Rule; import org.junit.Test; @@ -85,7 +85,7 @@ public class SameIntervalMergeTaskTest final SameIntervalMergeTask task = new SameIntervalMergeTask( null, "foo", - new Interval("2010-01-01/P1D"), + Intervals.of("2010-01-01/P1D"), aggregators, true, indexSpec, @@ -107,7 +107,7 @@ public class SameIntervalMergeTaskTest Assert.assertEquals("foo", mergeSegment.getDataSource()); Assert.assertEquals(newVersion, mergeSegment.getVersion()); // the merged segment's interval is within the requested interval - Assert.assertTrue(new Interval("2010-01-01/P1D").contains(mergeSegment.getInterval())); + Assert.assertTrue(Intervals.of("2010-01-01/P1D").contains(mergeSegment.getInterval())); // the merged segment should be NoneShardSpec Assert.assertTrue(mergeSegment.getShardSpec() instanceof NoneShardSpec); } @@ -153,19 +153,19 @@ public class SameIntervalMergeTaskTest List segments = ImmutableList.of( 
DataSegment.builder() .dataSource(mergeTask.getDataSource()) - .interval(new Interval("2010-01-01/PT1H")) + .interval(Intervals.of("2010-01-01/PT1H")) .version("oldVersion") .shardSpec(new LinearShardSpec(0)) .build(), DataSegment.builder() .dataSource(mergeTask.getDataSource()) - .interval(new Interval("2010-01-01/PT1H")) + .interval(Intervals.of("2010-01-01/PT1H")) .version("oldVersion") .shardSpec(new LinearShardSpec(0)) .build(), DataSegment.builder() .dataSource(mergeTask.getDataSource()) - .interval(new Interval("2010-01-01/PT2H")) + .interval(Intervals.of("2010-01-01/PT2H")) .version("oldVersion") .shardSpec(new LinearShardSpec(0)) .build() diff --git a/indexing-service/src/test/java/io/druid/indexing/common/task/TaskSerdeTest.java b/indexing-service/src/test/java/io/druid/indexing/common/task/TaskSerdeTest.java index b78e89997eb..02dfdbbd050 100644 --- a/indexing-service/src/test/java/io/druid/indexing/common/task/TaskSerdeTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/common/task/TaskSerdeTest.java @@ -33,6 +33,7 @@ import io.druid.indexing.common.TestUtils; import io.druid.indexing.common.task.IndexTask.IndexIOConfig; import io.druid.indexing.common.task.IndexTask.IndexIngestionSpec; import io.druid.indexing.common.task.IndexTask.IndexTuningConfig; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.granularity.Granularities; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.CountAggregatorFactory; @@ -53,7 +54,6 @@ import io.druid.segment.realtime.plumber.PlumberSchool; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; import org.hamcrest.CoreMatchers; -import org.joda.time.Interval; import org.joda.time.Period; import org.junit.Assert; import org.junit.Rule; @@ -183,7 +183,7 @@ public class TaskSerdeTest new UniformGranularitySpec( Granularities.DAY, null, - ImmutableList.of(new Interval("2010-01-01/P2D")) + ImmutableList.of(Intervals.of("2010-01-01/P2D")) ), jsonMapper ), @@ -245,7 +245,7 @@ public class TaskSerdeTest new UniformGranularitySpec( Granularities.DAY, null, - ImmutableList.of(new Interval("2010-01-01/P2D")) + ImmutableList.of(Intervals.of("2010-01-01/P2D")) ), jsonMapper ), @@ -283,7 +283,7 @@ public class TaskSerdeTest final List segments = ImmutableList.of( DataSegment.builder() .dataSource("foo") - .interval(new Interval("2010-01-01/P1D")) + .interval(Intervals.of("2010-01-01/P1D")) .version("1234") .build() ); @@ -305,7 +305,7 @@ public class TaskSerdeTest final MergeTask task2 = (MergeTask) jsonMapper.readValue(json, Task.class); Assert.assertEquals("foo", task.getDataSource()); - Assert.assertEquals(new Interval("2010-01-01/P1D"), task.getInterval()); + Assert.assertEquals(Intervals.of("2010-01-01/P1D"), task.getInterval()); Assert.assertEquals(task.getId(), task2.getId()); Assert.assertEquals(task.getGroupId(), task2.getGroupId()); @@ -328,7 +328,7 @@ public class TaskSerdeTest ); Assert.assertEquals("foo", task3.getDataSource()); - Assert.assertEquals(new Interval("2010-01-01/P1D"), task3.getInterval()); + Assert.assertEquals(Intervals.of("2010-01-01/P1D"), task3.getInterval()); Assert.assertEquals(segments, task3.getSegments()); Assert.assertEquals(aggregators, task3.getAggregators()); } @@ -340,7 +340,7 @@ public class TaskSerdeTest final SameIntervalMergeTask task = new SameIntervalMergeTask( null, "foo", - new Interval("2010-01-01/P1D"), + Intervals.of("2010-01-01/P1D"), aggregators, true, indexSpec, @@ -354,7 +354,7 @@ public class 
TaskSerdeTest final SameIntervalMergeTask task2 = (SameIntervalMergeTask) jsonMapper.readValue(json, Task.class); Assert.assertEquals("foo", task.getDataSource()); - Assert.assertEquals(new Interval("2010-01-01/P1D"), task.getInterval()); + Assert.assertEquals(Intervals.of("2010-01-01/P1D"), task.getInterval()); Assert.assertEquals(task.getId(), task2.getId()); Assert.assertEquals(task.getGroupId(), task2.getGroupId()); @@ -374,7 +374,7 @@ public class TaskSerdeTest final KillTask task = new KillTask( null, "foo", - new Interval("2010-01-01/P1D"), + Intervals.of("2010-01-01/P1D"), null ); @@ -384,7 +384,7 @@ public class TaskSerdeTest final KillTask task2 = (KillTask) jsonMapper.readValue(json, Task.class); Assert.assertEquals("foo", task.getDataSource()); - Assert.assertEquals(new Interval("2010-01-01/P1D"), task.getInterval()); + Assert.assertEquals(Intervals.of("2010-01-01/P1D"), task.getInterval()); Assert.assertEquals(task.getId(), task2.getId()); Assert.assertEquals(task.getGroupId(), task2.getGroupId()); @@ -395,20 +395,20 @@ public class TaskSerdeTest jsonMapper.writeValueAsString( new ClientKillQuery( "foo", - new Interval("2010-01-01/P1D") + Intervals.of("2010-01-01/P1D") ) ), Task.class ); Assert.assertEquals("foo", task3.getDataSource()); - Assert.assertEquals(new Interval("2010-01-01/P1D"), task3.getInterval()); + Assert.assertEquals(Intervals.of("2010-01-01/P1D"), task3.getInterval()); } @Test public void testVersionConverterTaskSerde() throws Exception { final ConvertSegmentTask task = ConvertSegmentTask.create( - DataSegment.builder().dataSource("foo").interval(new Interval("2010-01-01/P1D")).version("1234").build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2010-01-01/P1D")).version("1234").build(), null, false, true, @@ -421,7 +421,7 @@ public class TaskSerdeTest final ConvertSegmentTask task2 = (ConvertSegmentTask) jsonMapper.readValue(json, Task.class); Assert.assertEquals("foo", task.getDataSource()); - Assert.assertEquals(new Interval("2010-01-01/P1D"), task.getInterval()); + Assert.assertEquals(Intervals.of("2010-01-01/P1D"), task.getInterval()); Assert.assertEquals(task.getId(), task2.getId()); Assert.assertEquals(task.getGroupId(), task2.getGroupId()); @@ -435,7 +435,7 @@ public class TaskSerdeTest { final ConvertSegmentTask.SubTask task = new ConvertSegmentTask.SubTask( "myGroupId", - DataSegment.builder().dataSource("foo").interval(new Interval("2010-01-01/P1D")).version("1234").build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2010-01-01/P1D")).version("1234").build(), indexSpec, false, true, @@ -546,12 +546,12 @@ public class TaskSerdeTest final List segments = ImmutableList.of( DataSegment.builder() .dataSource("foo") - .interval(new Interval("2010-01-01/P1D")) + .interval(Intervals.of("2010-01-01/P1D")) .version("1234") .build(), DataSegment.builder() .dataSource("foo") - .interval(new Interval("2010-01-02/P1D")) + .interval(Intervals.of("2010-01-02/P1D")) .version("5678") .build() ); @@ -573,7 +573,7 @@ public class TaskSerdeTest final AppendTask task2 = (AppendTask) jsonMapper.readValue(json, Task.class); Assert.assertEquals("foo", task.getDataSource()); - Assert.assertEquals(new Interval("2010-01-01/P2D"), task.getInterval()); + Assert.assertEquals(Intervals.of("2010-01-01/P2D"), task.getInterval()); Assert.assertEquals(task.getId(), task2.getId()); Assert.assertEquals(task.getGroupId(), task2.getGroupId()); @@ -591,7 +591,7 @@ public class TaskSerdeTest ); Assert.assertEquals("foo", task3.getDataSource()); - 
Assert.assertEquals(new Interval("2010-01-01/P2D"), task3.getInterval()); + Assert.assertEquals(Intervals.of("2010-01-01/P2D"), task3.getInterval()); Assert.assertEquals(task3.getSegments(), segments); Assert.assertEquals(task.getAggregators(), task2.getAggregators()); } @@ -602,7 +602,7 @@ public class TaskSerdeTest final ArchiveTask task = new ArchiveTask( null, "foo", - new Interval("2010-01-01/P1D"), + Intervals.of("2010-01-01/P1D"), null ); @@ -612,7 +612,7 @@ public class TaskSerdeTest final ArchiveTask task2 = (ArchiveTask) jsonMapper.readValue(json, Task.class); Assert.assertEquals("foo", task.getDataSource()); - Assert.assertEquals(new Interval("2010-01-01/P1D"), task.getInterval()); + Assert.assertEquals(Intervals.of("2010-01-01/P1D"), task.getInterval()); Assert.assertEquals(task.getId(), task2.getId()); Assert.assertEquals(task.getGroupId(), task2.getGroupId()); @@ -626,7 +626,7 @@ public class TaskSerdeTest final RestoreTask task = new RestoreTask( null, "foo", - new Interval("2010-01-01/P1D"), + Intervals.of("2010-01-01/P1D"), null ); @@ -636,7 +636,7 @@ public class TaskSerdeTest final RestoreTask task2 = (RestoreTask) jsonMapper.readValue(json, Task.class); Assert.assertEquals("foo", task.getDataSource()); - Assert.assertEquals(new Interval("2010-01-01/P1D"), task.getInterval()); + Assert.assertEquals(Intervals.of("2010-01-01/P1D"), task.getInterval()); Assert.assertEquals(task.getId(), task2.getId()); Assert.assertEquals(task.getGroupId(), task2.getGroupId()); @@ -650,7 +650,7 @@ public class TaskSerdeTest final ConvertSegmentTask task = ConvertSegmentTask.create( new DataSegment( "dataSource", - Interval.parse("1990-01-01/1999-12-31"), + Intervals.of("1990-01-01/1999-12-31"), "version", ImmutableMap.of(), ImmutableList.of("dim1", "dim2"), @@ -674,7 +674,7 @@ public class TaskSerdeTest { final DataSegment segment = new DataSegment( "dataSource", - Interval.parse("1990-01-01/1999-12-31"), + Intervals.of("1990-01-01/1999-12-31"), "version", ImmutableMap.of(), ImmutableList.of("dim1", "dim2"), @@ -726,7 +726,7 @@ public class TaskSerdeTest final MoveTask task = new MoveTask( null, "foo", - new Interval("2010-01-01/P1D"), + Intervals.of("2010-01-01/P1D"), ImmutableMap.of("bucket", "hey", "baseKey", "what"), null, null @@ -738,7 +738,7 @@ public class TaskSerdeTest final MoveTask task2 = (MoveTask) jsonMapper.readValue(json, Task.class); Assert.assertEquals("foo", task.getDataSource()); - Assert.assertEquals(new Interval("2010-01-01/P1D"), task.getInterval()); + Assert.assertEquals(Intervals.of("2010-01-01/P1D"), task.getInterval()); Assert.assertEquals(ImmutableMap.of("bucket", "hey", "baseKey", "what"), task.getTargetLoadSpec()); Assert.assertEquals(task.getId(), task2.getId()); @@ -758,7 +758,7 @@ public class TaskSerdeTest "foo", null, new AggregatorFactory[0], new UniformGranularitySpec( Granularities.DAY, null, - ImmutableList.of(new Interval("2010-01-01/P1D")) + ImmutableList.of(Intervals.of("2010-01-01/P1D")) ), jsonMapper ), new HadoopIOConfig(ImmutableMap.of("paths", "bar"), null, null), null diff --git a/indexing-service/src/test/java/io/druid/indexing/firehose/IngestSegmentFirehoseFactoryTest.java b/indexing-service/src/test/java/io/druid/indexing/firehose/IngestSegmentFirehoseFactoryTest.java index 7b8b0c33c65..59f21640036 100644 --- a/indexing-service/src/test/java/io/druid/indexing/firehose/IngestSegmentFirehoseFactoryTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/firehose/IngestSegmentFirehoseFactoryTest.java @@ -33,7 +33,8 @@ import 
com.google.inject.Binder; import com.google.inject.Guice; import com.google.inject.Module; import com.metamx.emitter.service.ServiceEmitter; -import io.druid.common.utils.JodaUtils; +import io.druid.java.util.common.Intervals; +import io.druid.java.util.common.JodaUtils; import io.druid.data.input.InputRow; import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.InputRowParser; @@ -323,7 +324,7 @@ public class IngestSegmentFirehoseFactoryTest new Object[]{ new IngestSegmentFirehoseFactory( DATA_SOURCE_NAME, - FOREVER, + Intervals.ETERNITY, new SelectorDimFilter(DIM_NAME, DIM_VALUE, null), dim_names, metric_names, @@ -399,7 +400,6 @@ public class IngestSegmentFirehoseFactoryTest } private static final Logger log = new Logger(IngestSegmentFirehoseFactoryTest.class); - private static final Interval FOREVER = new Interval(JodaUtils.MIN_INSTANT, JodaUtils.MAX_INSTANT); private static final String DATA_SOURCE_NAME = "testDataSource"; private static final String DATA_SOURCE_VERSION = "version"; private static final Integer BINARY_VERSION = -1; @@ -450,7 +450,7 @@ public class IngestSegmentFirehoseFactoryTest Preconditions.checkArgument(shardNumber >= 0); return new DataSegment( DATA_SOURCE_NAME, - FOREVER, + Intervals.ETERNITY, DATA_SOURCE_VERSION, ImmutableMap.of( "type", "local", @@ -506,7 +506,7 @@ public class IngestSegmentFirehoseFactoryTest if (factory.getDimensions() != null) { Assert.assertArrayEquals(new String[]{DIM_NAME}, factory.getDimensions().toArray()); } - Assert.assertEquals(FOREVER, factory.getInterval()); + Assert.assertEquals(Intervals.ETERNITY, factory.getInterval()); if (factory.getMetrics() != null) { Assert.assertEquals( ImmutableSet.of(METRIC_LONG_NAME, METRIC_FLOAT_NAME), diff --git a/indexing-service/src/test/java/io/druid/indexing/firehose/IngestSegmentFirehoseFactoryTimelineTest.java b/indexing-service/src/test/java/io/druid/indexing/firehose/IngestSegmentFirehoseFactoryTimelineTest.java index 668564a0579..25c8fe77e4f 100644 --- a/indexing-service/src/test/java/io/druid/indexing/firehose/IngestSegmentFirehoseFactoryTimelineTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/firehose/IngestSegmentFirehoseFactoryTimelineTest.java @@ -31,7 +31,6 @@ import com.google.inject.Binder; import com.google.inject.Guice; import com.google.inject.Injector; import com.google.inject.Module; -import io.druid.common.utils.JodaUtils; import io.druid.data.input.Firehose; import io.druid.data.input.InputRow; import io.druid.data.input.MapBasedInputRow; @@ -49,6 +48,9 @@ import io.druid.indexing.common.actions.TaskActionClient; import io.druid.indexing.common.actions.TaskActionClientFactory; import io.druid.indexing.common.config.TaskConfig; import io.druid.indexing.common.task.Task; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; +import io.druid.java.util.common.JodaUtils; import io.druid.query.aggregation.LongSumAggregatorFactory; import io.druid.query.filter.NoopDimFilter; import io.druid.segment.IndexIO; @@ -66,7 +68,6 @@ import io.druid.timeline.DataSegment; import io.druid.timeline.partition.LinearShardSpec; import org.apache.commons.io.FileUtils; import org.easymock.EasyMock; -import org.joda.time.DateTime; import org.joda.time.Interval; import org.junit.After; import org.junit.Assert; @@ -172,7 +173,7 @@ public class IngestSegmentFirehoseFactoryTimelineTest return new TestCase( tmpDir, - new Interval(intervalString), + Intervals.of(intervalString), expectedCount, expectedSum, segments @@ -186,16 
+187,16 @@ public class IngestSegmentFirehoseFactoryTimelineTest InputRow... rows ) { - return new DataSegmentMaker(new Interval(intervalString), version, partitionNum, Arrays.asList(rows)); + return new DataSegmentMaker(Intervals.of(intervalString), version, partitionNum, Arrays.asList(rows)); } private static InputRow IR(String timeString, long metricValue) { return new MapBasedInputRow( - new DateTime(timeString).getMillis(), + DateTimes.of(timeString).getMillis(), Arrays.asList(DIMENSIONS), ImmutableMap.of( - TIME_COLUMN, new DateTime(timeString).toString(), + TIME_COLUMN, DateTimes.of(timeString).toString(), DIMENSIONS[0], "bar", METRICS[0], metricValue ) diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/ImmutableWorkerInfoTest.java b/indexing-service/src/test/java/io/druid/indexing/overlord/ImmutableWorkerInfoTest.java index 213fa3f5af3..7a3bb57a5f6 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/ImmutableWorkerInfoTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/ImmutableWorkerInfoTest.java @@ -24,7 +24,7 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableSet; import io.druid.indexing.worker.Worker; import io.druid.jackson.DefaultObjectMapper; -import org.joda.time.DateTime; +import io.druid.java.util.common.DateTimes; import org.junit.Assert; import org.junit.Test; @@ -40,7 +40,7 @@ public class ImmutableWorkerInfoTest 2, ImmutableSet.of("grp1", "grp2"), ImmutableSet.of("task1", "task2"), - new DateTime("2015-01-01T01:01:01Z") + DateTimes.of("2015-01-01T01:01:01Z") ); ObjectMapper mapper = new DefaultObjectMapper(); final ImmutableWorkerInfo serde = mapper.readValue( @@ -61,7 +61,7 @@ public class ImmutableWorkerInfoTest 2, ImmutableSet.of("grp1", "grp2"), ImmutableSet.of("task1", "task2"), - new DateTime("2015-01-01T01:01:01Z") + DateTimes.of("2015-01-01T01:01:01Z") ), new ImmutableWorkerInfo( new Worker( "http", "testWorker", "192.0.0.1", 10, "v1" @@ -69,7 +69,7 @@ public class ImmutableWorkerInfoTest 2, ImmutableSet.of("grp1", "grp2"), ImmutableSet.of("task1", "task2"), - new DateTime("2015-01-01T01:01:01Z") + DateTimes.of("2015-01-01T01:01:01Z") ), true); // different worker same tasks @@ -80,7 +80,7 @@ public class ImmutableWorkerInfoTest 2, ImmutableSet.of("grp1", "grp2"), ImmutableSet.of("task1", "task2"), - new DateTime("2015-01-01T01:01:01Z") + DateTimes.of("2015-01-01T01:01:01Z") ), new ImmutableWorkerInfo( new Worker( "http", "testWorker2", "192.0.0.1", 10, "v1" @@ -88,7 +88,7 @@ public class ImmutableWorkerInfoTest 2, ImmutableSet.of("grp1", "grp2"), ImmutableSet.of("task1", "task2"), - new DateTime("2015-01-01T01:01:01Z") + DateTimes.of("2015-01-01T01:01:01Z") ), false); // same worker different task groups @@ -99,7 +99,7 @@ public class ImmutableWorkerInfoTest 2, ImmutableSet.of("grp3", "grp2"), ImmutableSet.of("task1", "task2"), - new DateTime("2015-01-01T01:01:01Z") + DateTimes.of("2015-01-01T01:01:01Z") ), new ImmutableWorkerInfo( new Worker( "http", "testWorker", "192.0.0.1", 10, "v1" @@ -107,7 +107,7 @@ public class ImmutableWorkerInfoTest 2, ImmutableSet.of("grp1", "grp2"), ImmutableSet.of("task1", "task2"), - new DateTime("2015-01-01T01:01:01Z") + DateTimes.of("2015-01-01T01:01:01Z") ), false); // same worker different tasks @@ -118,7 +118,7 @@ public class ImmutableWorkerInfoTest 2, ImmutableSet.of("grp1", "grp2"), ImmutableSet.of("task1", "task2"), - new DateTime("2015-01-01T01:01:01Z") + DateTimes.of("2015-01-01T01:01:01Z") ), new 
ImmutableWorkerInfo( new Worker( "http", "testWorker2", "192.0.0.1", 10, "v1" @@ -126,7 +126,7 @@ public class ImmutableWorkerInfoTest 2, ImmutableSet.of("grp1", "grp2"), ImmutableSet.of("task1", "task3"), - new DateTime("2015-01-01T01:01:01Z") + DateTimes.of("2015-01-01T01:01:01Z") ), false); // same worker different capacity @@ -137,7 +137,7 @@ public class ImmutableWorkerInfoTest 3, ImmutableSet.of("grp1", "grp2"), ImmutableSet.of("task1", "task2"), - new DateTime("2015-01-01T01:01:01Z") + DateTimes.of("2015-01-01T01:01:01Z") ), new ImmutableWorkerInfo( new Worker( "http", "testWorker2", "192.0.0.1", 10, "v1" @@ -145,7 +145,7 @@ public class ImmutableWorkerInfoTest 2, ImmutableSet.of("grp1", "grp2"), ImmutableSet.of("task1", "task2"), - new DateTime("2015-01-01T01:01:01Z") + DateTimes.of("2015-01-01T01:01:01Z") ), false); // same worker different lastCompletedTaskTime @@ -156,7 +156,7 @@ public class ImmutableWorkerInfoTest 3, ImmutableSet.of("grp1", "grp2"), ImmutableSet.of("task1", "task2"), - new DateTime("2015-01-01T01:01:01Z") + DateTimes.of("2015-01-01T01:01:01Z") ), new ImmutableWorkerInfo( new Worker( "http", "testWorker2", "192.0.0.1", 10, "v1" @@ -164,7 +164,7 @@ public class ImmutableWorkerInfoTest 2, ImmutableSet.of("grp1", "grp2"), ImmutableSet.of("task1", "task2"), - new DateTime("2015-01-01T01:01:02Z") + DateTimes.of("2015-01-01T01:01:02Z") ), false); // same worker different blacklistedUntil @@ -175,8 +175,8 @@ public class ImmutableWorkerInfoTest 3, ImmutableSet.of("grp1", "grp2"), ImmutableSet.of("task1", "task2"), - new DateTime("2015-01-01T01:01:01Z"), - new DateTime("2017-07-30") + DateTimes.of("2015-01-01T01:01:01Z"), + DateTimes.of("2017-07-30") ), new ImmutableWorkerInfo( new Worker( "http", "testWorker2", "192.0.0.1", 10, "v1" @@ -184,8 +184,8 @@ public class ImmutableWorkerInfoTest 2, ImmutableSet.of("grp1", "grp2"), ImmutableSet.of("task1", "task2"), - new DateTime("2015-01-01T01:01:02Z"), - new DateTime("2017-07-31") + DateTimes.of("2015-01-01T01:01:02Z"), + DateTimes.of("2017-07-31") ), false); } diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/RealtimeishTask.java b/indexing-service/src/test/java/io/druid/indexing/overlord/RealtimeishTask.java index a168460769d..cb6d21ff74d 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/RealtimeishTask.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/RealtimeishTask.java @@ -31,6 +31,7 @@ import io.druid.indexing.common.actions.SegmentInsertAction; import io.druid.indexing.common.actions.TaskActionClient; import io.druid.indexing.common.task.AbstractTask; import io.druid.indexing.common.task.TaskResource; +import io.druid.java.util.common.Intervals; import io.druid.timeline.DataSegment; import org.joda.time.Interval; import org.junit.Assert; @@ -66,8 +67,8 @@ public class RealtimeishTask extends AbstractTask @Override public TaskStatus run(TaskToolbox toolbox) throws Exception { - final Interval interval1 = new Interval("2010-01-01T00/PT1H"); - final Interval interval2 = new Interval("2010-01-01T01/PT1H"); + final Interval interval1 = Intervals.of("2010-01-01T00/PT1H"); + final Interval interval2 = Intervals.of("2010-01-01T01/PT1H"); // Sort of similar to what realtime tasks do: diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/RemoteTaskRunnerTest.java b/indexing-service/src/test/java/io/druid/indexing/overlord/RemoteTaskRunnerTest.java index 766163932f2..22cf1645361 100644 --- 
a/indexing-service/src/test/java/io/druid/indexing/overlord/RemoteTaskRunnerTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/RemoteTaskRunnerTest.java @@ -39,10 +39,11 @@ import io.druid.indexing.common.task.Task; import io.druid.indexing.common.task.TaskResource; import io.druid.indexing.overlord.config.RemoteTaskRunnerConfig; import io.druid.indexing.worker.Worker; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.StringUtils; import org.apache.curator.framework.CuratorFramework; import org.apache.zookeeper.CreateMode; import org.easymock.EasyMock; -import org.joda.time.DateTime; import org.joda.time.Period; import org.junit.After; import org.junit.Assert; @@ -602,11 +603,11 @@ public class RemoteTaskRunnerTest public void testSortByInsertionTime() throws Exception { RemoteTaskRunnerWorkItem item1 = new RemoteTaskRunnerWorkItem("b", null, null) - .withQueueInsertionTime(new DateTime("2015-01-01T00:00:03Z")); + .withQueueInsertionTime(DateTimes.of("2015-01-01T00:00:03Z")); RemoteTaskRunnerWorkItem item2 = new RemoteTaskRunnerWorkItem("a", null, null) - .withQueueInsertionTime(new DateTime("2015-01-01T00:00:02Z")); + .withQueueInsertionTime(DateTimes.of("2015-01-01T00:00:02Z")); RemoteTaskRunnerWorkItem item3 = new RemoteTaskRunnerWorkItem("c", null, null) - .withQueueInsertionTime(new DateTime("2015-01-01T00:00:01Z")); + .withQueueInsertionTime(DateTimes.of("2015-01-01T00:00:01Z")); ArrayList workItems = Lists.newArrayList(item1, item2, item3); RemoteTaskRunner.sortByInsertionTime(workItems); Assert.assertEquals(item3, workItems.get(0)); @@ -709,7 +710,7 @@ public class RemoteTaskRunnerTest makeRemoteTaskRunner(rtrConfig); for (int i = 1; i < 13; i++) { - String taskId = String.format("rt-%d", i); + String taskId = StringUtils.format("rt-%d", i); TestRealtimeTask task = new TestRealtimeTask( taskId, new TaskResource(taskId, 1), "foo", TaskStatus.success(taskId), jsonMapper ); @@ -747,7 +748,7 @@ public class RemoteTaskRunnerTest makeRemoteTaskRunner(rtrConfig); for (int i = 1; i < 13; i++) { - String taskId = String.format("rt-%d", i); + String taskId = StringUtils.format("rt-%d", i); TestRealtimeTask task = new TestRealtimeTask( taskId, new TaskResource(taskId, 1), "foo", TaskStatus.success(taskId), jsonMapper ); diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/TaskLifecycleTest.java b/indexing-service/src/test/java/io/druid/indexing/overlord/TaskLifecycleTest.java index 1aa8a3f51ba..8d38b8abc10 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/TaskLifecycleTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/TaskLifecycleTest.java @@ -74,7 +74,9 @@ import io.druid.indexing.overlord.config.TaskQueueConfig; import io.druid.indexing.overlord.supervisor.SupervisorManager; import io.druid.indexing.test.TestIndexerMetadataStorageCoordinator; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.Pair; import io.druid.java.util.common.RE; import io.druid.java.util.common.StringUtils; @@ -182,7 +184,7 @@ public class TaskLifecycleTest return Comparators.intervalsByStartThenEnd().compare(dataSegment.getInterval(), dataSegment2.getInterval()); } }; - private static DateTime now = new DateTime(); + private static DateTime now = DateTimes.nowUtc(); private static final Iterable realtimeIdxTaskInputRows = ImmutableList.of( 
IR(now.toString("YYYY-MM-dd'T'HH:mm:ss"), "test_dim1", "test_dim2", 1.0f), @@ -234,7 +236,7 @@ public class TaskLifecycleTest private static InputRow IR(String dt, String dim1, String dim2, float met) { return new MapBasedInputRow( - new DateTime(dt).getMillis(), + DateTimes.of(dt).getMillis(), ImmutableList.of("dim1", "dim2"), ImmutableMap.of( "dim1", dim1, @@ -661,7 +663,7 @@ public class TaskLifecycleTest new UniformGranularitySpec( Granularities.DAY, null, - ImmutableList.of(new Interval("2010-01-01/P2D")) + ImmutableList.of(Intervals.of("2010-01-01/P2D")) ), mapper ), @@ -686,7 +688,7 @@ public class TaskLifecycleTest Assert.assertEquals("num segments nuked", 0, mdc.getNuked().size()); Assert.assertEquals("segment1 datasource", "foo", publishedSegments.get(0).getDataSource()); - Assert.assertEquals("segment1 interval", new Interval("2010-01-01/P1D"), publishedSegments.get(0).getInterval()); + Assert.assertEquals("segment1 interval", Intervals.of("2010-01-01/P1D"), publishedSegments.get(0).getInterval()); Assert.assertEquals( "segment1 dimensions", ImmutableList.of("dim1", "dim2"), @@ -695,7 +697,7 @@ public class TaskLifecycleTest Assert.assertEquals("segment1 metrics", ImmutableList.of("met"), publishedSegments.get(0).getMetrics()); Assert.assertEquals("segment2 datasource", "foo", publishedSegments.get(1).getDataSource()); - Assert.assertEquals("segment2 interval", new Interval("2010-01-02/P1D"), publishedSegments.get(1).getInterval()); + Assert.assertEquals("segment2 interval", Intervals.of("2010-01-02/P1D"), publishedSegments.get(1).getInterval()); Assert.assertEquals( "segment2 dimensions", ImmutableList.of("dim1", "dim2"), @@ -718,7 +720,7 @@ public class TaskLifecycleTest new UniformGranularitySpec( Granularities.DAY, null, - ImmutableList.of(new Interval("2010-01-01/P1D")) + ImmutableList.of(Intervals.of("2010-01-01/P1D")) ), mapper ), @@ -750,7 +752,7 @@ public class TaskLifecycleTest @Override public DataSegment apply(String input) { - final Interval interval = new Interval(input); + final Interval interval = Intervals.of(input); try { return DataSegment.builder() .dataSource("test_kill_task") @@ -789,13 +791,13 @@ public class TaskLifecycleTest // manually create local segments files List segmentFiles = Lists.newArrayList(); - for (DataSegment segment : mdc.getUnusedSegmentsForInterval("test_kill_task", new Interval("2011-04-01/P4D"))) { + for (DataSegment segment : mdc.getUnusedSegmentsForInterval("test_kill_task", Intervals.of("2011-04-01/P4D"))) { File file = new File((String) segment.getLoadSpec().get("path")); file.mkdirs(); segmentFiles.add(file); } - final Task killTask = new KillTask(null, "test_kill_task", new Interval("2011-04-01/P4D"), null); + final Task killTask = new KillTask(null, "test_kill_task", Intervals.of("2011-04-01/P4D"), null); final TaskStatus status = runTask(killTask); Assert.assertEquals("merged statusCode", TaskStatus.Status.SUCCESS, status.getStatusCode()); @@ -860,7 +862,7 @@ public class TaskLifecycleTest "id1", new TaskResource("id1", 1), "ds", - new Interval("2012-01-01/P1D"), + Intervals.of("2012-01-01/P1D"), null ) { @@ -880,7 +882,7 @@ public class TaskLifecycleTest final DataSegment segment = DataSegment.builder() .dataSource("ds") - .interval(new Interval("2012-01-01/P1D")) + .interval(Intervals.of("2012-01-01/P1D")) .version(myLock.getVersion()) .build(); @@ -899,7 +901,7 @@ public class TaskLifecycleTest @Test public void testBadInterval() throws Exception { - final Task task = new AbstractFixedIntervalTask("id1", "id1", "ds", new 
Interval("2012-01-01/P1D"), null) + final Task task = new AbstractFixedIntervalTask("id1", "id1", "ds", Intervals.of("2012-01-01/P1D"), null) { @Override public String getType() @@ -914,7 +916,7 @@ public class TaskLifecycleTest final DataSegment segment = DataSegment.builder() .dataSource("ds") - .interval(new Interval("2012-01-01/P2D")) + .interval(Intervals.of("2012-01-01/P2D")) .version(myLock.getVersion()) .build(); @@ -933,7 +935,7 @@ public class TaskLifecycleTest @Test public void testBadVersion() throws Exception { - final Task task = new AbstractFixedIntervalTask("id1", "id1", "ds", new Interval("2012-01-01/P1D"), null) + final Task task = new AbstractFixedIntervalTask("id1", "id1", "ds", Intervals.of("2012-01-01/P1D"), null) { @Override public String getType() @@ -948,7 +950,7 @@ public class TaskLifecycleTest final DataSegment segment = DataSegment.builder() .dataSource("ds") - .interval(new Interval("2012-01-01/P1D")) + .interval(Intervals.of("2012-01-01/P1D")) .version(myLock.getVersion() + "1!!!1!!") .build(); @@ -1002,7 +1004,7 @@ public class TaskLifecycleTest Assert.assertEquals("test_ds", segment.getDataSource()); Assert.assertEquals(ImmutableList.of("dim1", "dim2"), segment.getDimensions()); Assert.assertEquals( - new Interval(now.toString("YYYY-MM-dd") + "/" + now.plusDays(1).toString("YYYY-MM-dd")), + Intervals.of(now.toString("YYYY-MM-dd") + "/" + now.plusDays(1).toString("YYYY-MM-dd")), segment.getInterval() ); Assert.assertEquals(ImmutableList.of("count"), segment.getMetrics()); @@ -1082,7 +1084,7 @@ public class TaskLifecycleTest new UniformGranularitySpec( Granularities.DAY, null, - ImmutableList.of(new Interval("2010-01-01/P2D")) + ImmutableList.of(Intervals.of("2010-01-01/P2D")) ), mapper ), @@ -1116,7 +1118,7 @@ public class TaskLifecycleTest Assert.assertEquals("num segments nuked", 0, mdc.getNuked().size()); Assert.assertEquals("segment1 datasource", "foo", publishedSegments.get(0).getDataSource()); - Assert.assertEquals("segment1 interval", new Interval("2010-01-01/P1D"), publishedSegments.get(0).getInterval()); + Assert.assertEquals("segment1 interval", Intervals.of("2010-01-01/P1D"), publishedSegments.get(0).getInterval()); Assert.assertEquals( "segment1 dimensions", ImmutableList.of("dim1", "dim2"), @@ -1125,7 +1127,7 @@ public class TaskLifecycleTest Assert.assertEquals("segment1 metrics", ImmutableList.of("met"), publishedSegments.get(0).getMetrics()); Assert.assertEquals("segment2 datasource", "foo", publishedSegments.get(1).getDataSource()); - Assert.assertEquals("segment2 interval", new Interval("2010-01-02/P1D"), publishedSegments.get(1).getInterval()); + Assert.assertEquals("segment2 interval", Intervals.of("2010-01-02/P1D"), publishedSegments.get(1).getInterval()); Assert.assertEquals( "segment2 dimensions", ImmutableList.of("dim1", "dim2"), diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/TaskLockboxTest.java b/indexing-service/src/test/java/io/druid/indexing/overlord/TaskLockboxTest.java index f30b15d6a7f..d7fceb6a1ec 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/TaskLockboxTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/TaskLockboxTest.java @@ -31,12 +31,12 @@ import io.druid.indexing.common.task.NoopTask; import io.druid.indexing.common.task.Task; import io.druid.jackson.DefaultObjectMapper; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.StringUtils; import io.druid.metadata.EntryExistsException; import 
io.druid.metadata.SQLMetadataStorageActionHandlerFactory; import io.druid.metadata.TestDerbyConnector; import org.easymock.EasyMock; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Before; import org.junit.Rule; @@ -84,13 +84,13 @@ public class TaskLockboxTest { Task task = NoopTask.create(); lockbox.add(task); - Assert.assertNotNull(lockbox.lock(task, new Interval("2015-01-01/2015-01-02"))); + Assert.assertNotNull(lockbox.lock(task, Intervals.of("2015-01-01/2015-01-02"))); } @Test(expected = IllegalStateException.class) public void testLockForInactiveTask() throws InterruptedException { - lockbox.lock(NoopTask.create(), new Interval("2015-01-01/2015-01-02")); + lockbox.lock(NoopTask.create(), Intervals.of("2015-01-01/2015-01-02")); } @Test @@ -101,7 +101,7 @@ public class TaskLockboxTest exception.expectMessage("Unable to grant lock to inactive Task"); lockbox.add(task); lockbox.remove(task); - lockbox.lock(task, new Interval("2015-01-01/2015-01-02")); + lockbox.lock(task, Intervals.of("2015-01-01/2015-01-02")); } @Test @@ -109,18 +109,18 @@ public class TaskLockboxTest { Task task = NoopTask.create(); lockbox.add(task); - Assert.assertTrue(lockbox.tryLock(task, new Interval("2015-01-01/2015-01-03")).isPresent()); + Assert.assertTrue(lockbox.tryLock(task, Intervals.of("2015-01-01/2015-01-03")).isPresent()); // try to take lock for task 2 for overlapping interval Task task2 = NoopTask.create(); lockbox.add(task2); - Assert.assertFalse(lockbox.tryLock(task2, new Interval("2015-01-01/2015-01-02")).isPresent()); + Assert.assertFalse(lockbox.tryLock(task2, Intervals.of("2015-01-01/2015-01-02")).isPresent()); // task 1 unlocks the lock lockbox.remove(task); // Now task2 should be able to get the lock - Assert.assertTrue(lockbox.tryLock(task2, new Interval("2015-01-01/2015-01-02")).isPresent()); + Assert.assertTrue(lockbox.tryLock(task2, Intervals.of("2015-01-01/2015-01-02")).isPresent()); } @Test @@ -128,17 +128,17 @@ public class TaskLockboxTest { Task task = NoopTask.create(); lockbox.add(task); - Optional lock1 = lockbox.tryLock(task, new Interval("2015-01-01/2015-01-03")); + Optional lock1 = lockbox.tryLock(task, Intervals.of("2015-01-01/2015-01-03")); Assert.assertTrue(lock1.isPresent()); - Assert.assertEquals(new Interval("2015-01-01/2015-01-03"), lock1.get().getInterval()); + Assert.assertEquals(Intervals.of("2015-01-01/2015-01-03"), lock1.get().getInterval()); // same task tries to take partially overlapping interval; should fail - Assert.assertFalse(lockbox.tryLock(task, new Interval("2015-01-02/2015-01-04")).isPresent()); + Assert.assertFalse(lockbox.tryLock(task, Intervals.of("2015-01-02/2015-01-04")).isPresent()); // same task tries to take contained interval; should succeed and should match the original lock - Optional lock2 = lockbox.tryLock(task, new Interval("2015-01-01/2015-01-02")); + Optional lock2 = lockbox.tryLock(task, Intervals.of("2015-01-01/2015-01-02")); Assert.assertTrue(lock2.isPresent()); - Assert.assertEquals(new Interval("2015-01-01/2015-01-03"), lock2.get().getInterval()); + Assert.assertEquals(Intervals.of("2015-01-01/2015-01-03"), lock2.get().getInterval()); // only the first lock should actually exist Assert.assertEquals( @@ -151,7 +151,7 @@ public class TaskLockboxTest @Test(expected = IllegalStateException.class) public void testTryLockForInactiveTask() { - Assert.assertFalse(lockbox.tryLock(NoopTask.create(), new Interval("2015-01-01/2015-01-02")).isPresent()); + Assert.assertFalse(lockbox.tryLock(NoopTask.create(), 
Intervals.of("2015-01-01/2015-01-02")).isPresent()); } @Test @@ -162,7 +162,7 @@ public class TaskLockboxTest exception.expectMessage("Unable to grant lock to inactive Task"); lockbox.add(task); lockbox.remove(task); - Assert.assertFalse(lockbox.tryLock(task, new Interval("2015-01-01/2015-01-02")).isPresent()); + Assert.assertFalse(lockbox.tryLock(task, Intervals.of("2015-01-01/2015-01-02")).isPresent()); } @Test @@ -173,8 +173,8 @@ public class TaskLockboxTest lockbox.add(task1); lockbox.add(task2); - Assert.assertNotNull(lockbox.lock(task1, new Interval("2015-01-01/2015-01-02"), 5000)); - Assert.assertNull(lockbox.lock(task2, new Interval("2015-01-01/2015-01-15"), 1000)); + lockbox.lock(task1, Intervals.of("2015-01-01/2015-01-02"), 5000); + lockbox.lock(task2, Intervals.of("2015-01-01/2015-01-15"), 5000); } @Test @@ -186,7 +186,7 @@ public class TaskLockboxTest taskStorage.insert(task, TaskStatus.running(task.getId())); originalBox.add(task); Assert.assertTrue( - originalBox.tryLock(task, new Interval(StringUtils.format("2017-01-0%d/2017-01-0%d", (i + 1), (i + 2)))) + originalBox.tryLock(task, Intervals.of(StringUtils.format("2017-01-0%d/2017-01-0%d", (i + 1), (i + 2)))) .isPresent() ); } diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/autoscaling/PendingTaskBasedProvisioningStrategyTest.java b/indexing-service/src/test/java/io/druid/indexing/overlord/autoscaling/PendingTaskBasedProvisioningStrategyTest.java index 3604838ec0d..0af3fd855f5 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/autoscaling/PendingTaskBasedProvisioningStrategyTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/autoscaling/PendingTaskBasedProvisioningStrategyTest.java @@ -44,6 +44,7 @@ import io.druid.indexing.overlord.setup.WorkerBehaviorConfig; import io.druid.indexing.worker.TaskAnnouncement; import io.druid.indexing.worker.Worker; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; import org.easymock.EasyMock; import org.joda.time.DateTime; import org.joda.time.Period; @@ -338,7 +339,7 @@ public class PendingTaskBasedProvisioningStrategyTest testTask.getId(), null, TaskLocation.unknown() - ).withQueueInsertionTime(new DateTime()) + ).withQueueInsertionTime(DateTimes.nowUtc()) ) ).times(2); EasyMock.expect(runner.getWorkers()).andReturn( diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/autoscaling/SimpleProvisioningStrategyTest.java b/indexing-service/src/test/java/io/druid/indexing/overlord/autoscaling/SimpleProvisioningStrategyTest.java index 3dfc4201657..5701fa34ad7 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/autoscaling/SimpleProvisioningStrategyTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/autoscaling/SimpleProvisioningStrategyTest.java @@ -42,6 +42,7 @@ import io.druid.indexing.overlord.setup.WorkerBehaviorConfig; import io.druid.indexing.worker.TaskAnnouncement; import io.druid.indexing.worker.Worker; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; import org.easymock.EasyMock; import org.joda.time.DateTime; import org.joda.time.Period; @@ -123,7 +124,7 @@ public class SimpleProvisioningStrategyTest RemoteTaskRunner runner = EasyMock.createMock(RemoteTaskRunner.class); EasyMock.expect(runner.getPendingTasks()).andReturn( Collections.singletonList( - new RemoteTaskRunnerWorkItem(testTask.getId(), null, null).withQueueInsertionTime(new DateTime()) + new 
RemoteTaskRunnerWorkItem(testTask.getId(), null, null).withQueueInsertionTime(DateTimes.nowUtc()) ) ); EasyMock.expect(runner.getWorkers()).andReturn( @@ -160,7 +161,7 @@ public class SimpleProvisioningStrategyTest RemoteTaskRunner runner = EasyMock.createMock(RemoteTaskRunner.class); EasyMock.expect(runner.getPendingTasks()).andReturn( Collections.singletonList( - new RemoteTaskRunnerWorkItem(testTask.getId(), null, null).withQueueInsertionTime(new DateTime()) + new RemoteTaskRunnerWorkItem(testTask.getId(), null, null).withQueueInsertionTime(DateTimes.nowUtc()) ) ).times(2); EasyMock.expect(runner.getWorkers()).andReturn( @@ -218,7 +219,7 @@ public class SimpleProvisioningStrategyTest RemoteTaskRunner runner = EasyMock.createMock(RemoteTaskRunner.class); EasyMock.expect(runner.getPendingTasks()).andReturn( Collections.singletonList( - new RemoteTaskRunnerWorkItem(testTask.getId(), null, null).withQueueInsertionTime(new DateTime()) + new RemoteTaskRunnerWorkItem(testTask.getId(), null, null).withQueueInsertionTime(DateTimes.nowUtc()) ) ).times(2); EasyMock.expect(runner.getWorkers()).andReturn( @@ -270,7 +271,7 @@ public class SimpleProvisioningStrategyTest RemoteTaskRunner runner = EasyMock.createMock(RemoteTaskRunner.class); EasyMock.expect(runner.getPendingTasks()).andReturn( Collections.singletonList( - new RemoteTaskRunnerWorkItem(testTask.getId(), null, null).withQueueInsertionTime(new DateTime()) + new RemoteTaskRunnerWorkItem(testTask.getId(), null, null).withQueueInsertionTime(DateTimes.nowUtc()) ) ).times(2); EasyMock.expect(runner.getWorkers()).andReturn( @@ -314,7 +315,7 @@ public class SimpleProvisioningStrategyTest RemoteTaskRunner runner = EasyMock.createMock(RemoteTaskRunner.class); EasyMock.expect(runner.getPendingTasks()).andReturn( Collections.singletonList( - new RemoteTaskRunnerWorkItem(testTask.getId(), null, null).withQueueInsertionTime(new DateTime()) + new RemoteTaskRunnerWorkItem(testTask.getId(), null, null).withQueueInsertionTime(DateTimes.nowUtc()) ) ).times(2); EasyMock.expect(runner.getWorkers()).andReturn( @@ -365,7 +366,7 @@ public class SimpleProvisioningStrategyTest RemoteTaskRunner runner = EasyMock.createMock(RemoteTaskRunner.class); EasyMock.expect(runner.getPendingTasks()).andReturn( Collections.singletonList( - new RemoteTaskRunnerWorkItem(testTask.getId(), null, null).withQueueInsertionTime(new DateTime()) + new RemoteTaskRunnerWorkItem(testTask.getId(), null, null).withQueueInsertionTime(DateTimes.nowUtc()) ) ).times(2); EasyMock.expect(runner.getWorkers()).andReturn( @@ -472,7 +473,7 @@ public class SimpleProvisioningStrategyTest RemoteTaskRunner runner = EasyMock.createMock(RemoteTaskRunner.class); EasyMock.expect(runner.getPendingTasks()).andReturn( Collections.singletonList( - new RemoteTaskRunnerWorkItem(testTask.getId(), null, null).withQueueInsertionTime(new DateTime()) + new RemoteTaskRunnerWorkItem(testTask.getId(), null, null).withQueueInsertionTime(DateTimes.nowUtc()) ) ).times(2); EasyMock.expect(runner.getWorkers()).andReturn( diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/setup/EqualDistributionWithAffinityWorkerSelectStrategyTest.java b/indexing-service/src/test/java/io/druid/indexing/overlord/setup/EqualDistributionWithAffinityWorkerSelectStrategyTest.java index 722e5637a79..9cad5274fd5 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/setup/EqualDistributionWithAffinityWorkerSelectStrategyTest.java +++ 
b/indexing-service/src/test/java/io/druid/indexing/overlord/setup/EqualDistributionWithAffinityWorkerSelectStrategyTest.java @@ -26,7 +26,7 @@ import io.druid.indexing.common.task.NoopTask; import io.druid.indexing.overlord.ImmutableWorkerInfo; import io.druid.indexing.overlord.config.RemoteTaskRunnerConfig; import io.druid.indexing.worker.Worker; -import org.joda.time.DateTime; +import io.druid.java.util.common.DateTimes; import org.junit.Assert; import org.junit.Test; @@ -46,31 +46,31 @@ public class EqualDistributionWithAffinityWorkerSelectStrategyTest ImmutableMap.of( "localhost0", new ImmutableWorkerInfo( - new Worker("http", "localhost0", "localhost0", 2, "v1"), 0, - Sets.newHashSet(), - Sets.newHashSet(), - DateTime.now() + new Worker("http", "localhost0", "localhost0", 2, "v1"), 0, + Sets.newHashSet(), + Sets.newHashSet(), + DateTimes.nowUtc() ), "localhost1", new ImmutableWorkerInfo( new Worker("http", "localhost1", "localhost1", 2, "v1"), 0, Sets.newHashSet(), Sets.newHashSet(), - DateTime.now() + DateTimes.nowUtc() ), "localhost2", new ImmutableWorkerInfo( new Worker("http", "localhost2", "localhost2", 2, "v1"), 1, Sets.newHashSet(), Sets.newHashSet(), - DateTime.now() + DateTimes.nowUtc() ), "localhost3", new ImmutableWorkerInfo( new Worker("http", "localhost3", "localhost3", 2, "v1"), 1, Sets.newHashSet(), Sets.newHashSet(), - DateTime.now() + DateTimes.nowUtc() ) ), new NoopTask(null, 1, 0, null, null, null) @@ -101,14 +101,14 @@ public class EqualDistributionWithAffinityWorkerSelectStrategyTest new Worker("http", "lhost", "lhost", 1, "v1"), 0, Sets.newHashSet(), Sets.newHashSet(), - DateTime.now() + DateTimes.nowUtc() ), "localhost", new ImmutableWorkerInfo( new Worker("http", "localhost", "localhost", 1, "v1"), 0, Sets.newHashSet(), Sets.newHashSet(), - DateTime.now() + DateTimes.nowUtc() ) ), new NoopTask(null, 1, 0, null, null, null) @@ -132,7 +132,7 @@ public class EqualDistributionWithAffinityWorkerSelectStrategyTest new Worker("http", "localhost", "localhost", 1, "v1"), 0, Sets.newHashSet(), Sets.newHashSet(), - DateTime.now() + DateTimes.nowUtc() ) ), new NoopTask(null, 1, 0, null, null, null) diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/setup/EqualDistributionWorkerSelectStrategyTest.java b/indexing-service/src/test/java/io/druid/indexing/overlord/setup/EqualDistributionWorkerSelectStrategyTest.java index fad4d5ec990..0e115afd7c7 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/setup/EqualDistributionWorkerSelectStrategyTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/setup/EqualDistributionWorkerSelectStrategyTest.java @@ -26,7 +26,7 @@ import io.druid.indexing.common.task.NoopTask; import io.druid.indexing.overlord.ImmutableWorkerInfo; import io.druid.indexing.overlord.config.RemoteTaskRunnerConfig; import io.druid.indexing.worker.Worker; -import org.joda.time.DateTime; +import io.druid.java.util.common.DateTimes; import org.junit.Assert; import org.junit.Test; @@ -46,14 +46,14 @@ public class EqualDistributionWorkerSelectStrategyTest new Worker("http", "lhost", "lhost", 1, "v1"), 0, Sets.newHashSet(), Sets.newHashSet(), - DateTime.now() + DateTimes.nowUtc() ), "localhost", new ImmutableWorkerInfo( new Worker("http", "localhost", "localhost", 1, "v1"), 1, Sets.newHashSet(), Sets.newHashSet(), - DateTime.now() + DateTimes.nowUtc() ) ), new NoopTask(null, 1, 0, null, null, null) @@ -82,14 +82,14 @@ public class EqualDistributionWorkerSelectStrategyTest new Worker("http", "lhost", "lhost", 5, 
"v1"), 5, Sets.newHashSet(), Sets.newHashSet(), - DateTime.now() + DateTimes.nowUtc() ), "localhost", new ImmutableWorkerInfo( new Worker("http", "localhost", "localhost", 10, "v1"), 5, Sets.newHashSet(), Sets.newHashSet(), - DateTime.now() + DateTimes.nowUtc() ) ), new NoopTask(null, 1, 0, null, null, null) @@ -119,14 +119,14 @@ public class EqualDistributionWorkerSelectStrategyTest new Worker("http", "disableHost", "disableHost", 10, DISABLED_VERSION), 2, Sets.newHashSet(), Sets.newHashSet(), - DateTime.now() + DateTimes.nowUtc() ), "localhost", new ImmutableWorkerInfo( new Worker("http", "enableHost", "enableHost", 10, "v1"), 5, Sets.newHashSet(), Sets.newHashSet(), - DateTime.now() + DateTimes.nowUtc() ) ), new NoopTask(null, 1, 0, null, null, null) @@ -156,14 +156,14 @@ public class EqualDistributionWorkerSelectStrategyTest new Worker("http", "disableHost", "disableHost", 10, DISABLED_VERSION), 5, Sets.newHashSet(), Sets.newHashSet(), - DateTime.now() + DateTimes.nowUtc() ), "localhost", new ImmutableWorkerInfo( new Worker("http", "enableHost", "enableHost", 10, "v1"), 5, Sets.newHashSet(), Sets.newHashSet(), - DateTime.now() + DateTimes.nowUtc() ) ), new NoopTask(null, 1, 0, null, null, null) diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/setup/FillCapacityWithAffinityWorkerSelectStrategyTest.java b/indexing-service/src/test/java/io/druid/indexing/overlord/setup/FillCapacityWithAffinityWorkerSelectStrategyTest.java index 43bcf7d6b5e..d3b6f5ce115 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/setup/FillCapacityWithAffinityWorkerSelectStrategyTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/setup/FillCapacityWithAffinityWorkerSelectStrategyTest.java @@ -26,7 +26,7 @@ import io.druid.indexing.common.task.NoopTask; import io.druid.indexing.overlord.ImmutableWorkerInfo; import io.druid.indexing.overlord.config.RemoteTaskRunnerConfig; import io.druid.indexing.worker.Worker; -import org.joda.time.DateTime; +import io.druid.java.util.common.DateTimes; import org.junit.Assert; import org.junit.Test; @@ -49,14 +49,14 @@ public class FillCapacityWithAffinityWorkerSelectStrategyTest new Worker("http", "lhost", "lhost", 1, "v1"), 0, Sets.newHashSet(), Sets.newHashSet(), - DateTime.now() + DateTimes.nowUtc() ), "localhost", new ImmutableWorkerInfo( new Worker("http", "localhost", "localhost", 1, "v1"), 0, Sets.newHashSet(), Sets.newHashSet(), - DateTime.now() + DateTimes.nowUtc() ) ), new NoopTask(null, 1, 0, null, null, null) @@ -87,14 +87,14 @@ public class FillCapacityWithAffinityWorkerSelectStrategyTest new Worker("http", "lhost", "lhost", 1, "v1"), 0, Sets.newHashSet(), Sets.newHashSet(), - DateTime.now() + DateTimes.nowUtc() ), "localhost", new ImmutableWorkerInfo( new Worker("http", "localhost", "localhost", 1, "v1"), 0, Sets.newHashSet(), Sets.newHashSet(), - DateTime.now() + DateTimes.nowUtc() ) ), new NoopTask(null, 1, 0, null, null, null) @@ -118,7 +118,7 @@ public class FillCapacityWithAffinityWorkerSelectStrategyTest new Worker("http", "localhost", "localhost", 1, "v1"), 0, Sets.newHashSet(), Sets.newHashSet(), - DateTime.now() + DateTimes.nowUtc() ) ), new NoopTask(null, 1, 0, null, null, null) diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/supervisor/SupervisorManagerTest.java b/indexing-service/src/test/java/io/druid/indexing/overlord/supervisor/SupervisorManagerTest.java index 6778deb7130..c22dde9eb1f 100644 --- 
a/indexing-service/src/test/java/io/druid/indexing/overlord/supervisor/SupervisorManagerTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/supervisor/SupervisorManagerTest.java @@ -22,12 +22,12 @@ package io.druid.indexing.overlord.supervisor; import com.google.common.base.Optional; import com.google.common.collect.ImmutableMap; import io.druid.indexing.overlord.DataSourceMetadata; +import io.druid.java.util.common.DateTimes; import io.druid.metadata.MetadataSupervisorManager; import org.easymock.EasyMock; import org.easymock.EasyMockRunner; import org.easymock.EasyMockSupport; import org.easymock.Mock; -import org.joda.time.DateTime; import org.junit.Assert; import org.junit.Before; import org.junit.Rule; @@ -194,7 +194,7 @@ public class SupervisorManagerTest extends EasyMockSupport @Test public void testGetSupervisorStatus() throws Exception { - SupervisorReport report = new SupervisorReport("id1", DateTime.now()) + SupervisorReport report = new SupervisorReport("id1", DateTimes.nowUtc()) { @Override public Object getPayload() diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/supervisor/SupervisorResourceTest.java b/indexing-service/src/test/java/io/druid/indexing/overlord/supervisor/SupervisorResourceTest.java index c593f6986c0..f4b96b3416c 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/supervisor/SupervisorResourceTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/supervisor/SupervisorResourceTest.java @@ -26,13 +26,13 @@ import com.google.common.collect.ImmutableSet; import com.google.common.collect.Maps; import io.druid.indexing.overlord.DataSourceMetadata; import io.druid.indexing.overlord.TaskMaster; -import org.easymock.Capture; +import io.druid.java.util.common.DateTimes; import io.druid.server.security.AuthConfig; +import org.easymock.Capture; import org.easymock.EasyMock; import org.easymock.EasyMockRunner; import org.easymock.EasyMockSupport; import org.easymock.Mock; -import org.joda.time.DateTime; import org.junit.Assert; import org.junit.Before; import org.junit.Test; @@ -148,7 +148,7 @@ public class SupervisorResourceTest extends EasyMockSupport @Test public void testSpecGetStatus() throws Exception { - SupervisorReport report = new SupervisorReport("id", DateTime.now()) + SupervisorReport report = new SupervisorReport("id", DateTimes.nowUtc()) { @Override public Object getPayload() diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index 18b39e2f221..1864d651e51 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -211,6 +211,16 @@ + + de.thetaphi + forbiddenapis + + + + ../codestyle/joda-time-forbidden-apis.txt + + + @@ -244,6 +254,16 @@ + + de.thetaphi + forbiddenapis + + + + ../codestyle/joda-time-forbidden-apis.txt + + + diff --git a/integration-tests/src/main/java/org/testng/TestNG.java b/integration-tests/src/main/java/org/testng/TestNG.java index f375cea6298..0586a81519b 100644 --- a/integration-tests/src/main/java/org/testng/TestNG.java +++ b/integration-tests/src/main/java/org/testng/TestNG.java @@ -329,17 +329,8 @@ public class TestNG s.getChildSuites().add(cSuite); } } - catch (FileNotFoundException e) { - e.printStackTrace(System.out); - } - catch (ParserConfigurationException e) { - e.printStackTrace(System.out); - } - catch (SAXException e) { - e.printStackTrace(System.out); - } - catch (IOException e) { - e.printStackTrace(System.out); + catch (ParserConfigurationException | SAXException | IOException e) { + LOGGER.error("", e); } 
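// Editor's sketch, not part of the original patch: the changes to the vendored TestNG sources
// collapse the per-exception printStackTrace(System.out) handlers into a single Java 7 multi-catch
// routed through the log4testng Logger. A self-contained illustration of the pattern is below;
// MultiCatchSketch and parseSuite are stand-in names, not actual TestNG members.
import java.io.IOException;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import org.testng.log4testng.Logger;
import org.xml.sax.SAXException;

final class MultiCatchSketch
{
  private static final Logger LOGGER = Logger.getLogger(MultiCatchSketch.class);

  static void parseSuite(String path)
  {
    try {
      // Parsing an XML suite file can fail in several independent ways...
      DocumentBuilderFactory.newInstance().newDocumentBuilder().parse(path);
    }
    catch (ParserConfigurationException | SAXException | IOException e) {
      // ...but all of them are handled the same way: log once, keep stack traces off stdout.
      LOGGER.error("", e);
    }
  }
}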
} @@ -366,17 +357,8 @@ public class TestNG } } } - catch (FileNotFoundException e) { - e.printStackTrace(System.out); - } - catch (IOException e) { - e.printStackTrace(System.out); - } - catch (ParserConfigurationException e) { - e.printStackTrace(System.out); - } - catch (SAXException e) { - e.printStackTrace(System.out); + catch (IOException | SAXException | ParserConfigurationException e) { + LOGGER.error("", e); } catch (Exception ex) { // Probably a Yaml exception, unnest it @@ -453,14 +435,8 @@ public class TestNG m_suites.add(xmlSuite); } } - catch (ParserConfigurationException ex) { - ex.printStackTrace(); - } - catch (SAXException ex) { - ex.printStackTrace(); - } - catch (IOException ex) { - ex.printStackTrace(); + catch (ParserConfigurationException | SAXException | IOException ex) { + LOGGER.error("", ex); } } @@ -1181,17 +1157,12 @@ public class TestNG if (!m_hasTests) { setStatus(HAS_NO_TEST); if (TestRunner.getVerbose() > 1) { - System.err.println("[TestNG] No tests found. Nothing was run"); + LOGGER.error("[TestNG] No tests found. Nothing was run"); usage(); } } } - private void p(String string) - { - System.out.println("[TestNG] " + string); - } - private void runExecutionListeners(boolean start) { for (List listeners @@ -1231,8 +1202,7 @@ public class TestNG ); } catch (Exception ex) { - System.err.println("[TestNG] Reporter " + reporter + " failed"); - ex.printStackTrace(System.err); + LOGGER.error("[TestNG] Reporter " + reporter + " failed", ex); } } } @@ -1505,7 +1475,7 @@ public class TestNG } catch (TestNGException ex) { if (TestRunner.getVerbose() > 1) { - ex.printStackTrace(System.out); + LOGGER.error("", ex); } else { error(ex.getMessage()); } @@ -1927,7 +1897,7 @@ public class TestNG static void exitWithError(String msg) { - System.err.println(msg); + LOGGER.error(msg); usage(); System.exit(1); } diff --git a/integration-tests/src/main/java/org/testng/remote/RemoteTestNG.java b/integration-tests/src/main/java/org/testng/remote/RemoteTestNG.java index fbf7faa7cd5..78807e69587 100644 --- a/integration-tests/src/main/java/org/testng/remote/RemoteTestNG.java +++ b/integration-tests/src/main/java/org/testng/remote/RemoteTestNG.java @@ -31,6 +31,7 @@ import org.testng.TestNGException; import org.testng.TestRunner; import org.testng.collections.Lists; import org.testng.internal.ClassHelper; +import org.testng.log4testng.Logger; import org.testng.remote.strprotocol.GenericMessage; import org.testng.remote.strprotocol.IMessageSender; import org.testng.remote.strprotocol.MessageHelper; @@ -59,6 +60,8 @@ import static org.testng.internal.Utils.defaultIfStringEmpty; */ public class RemoteTestNG extends TestNG { + private static final Logger LOGGER = Logger.getLogger(TestNG.class); + // The following constants are referenced by the Eclipse plug-in, make sure you // modify the plug-in as well if you change any of them. public static final String DEBUG_PORT = "12345"; @@ -134,7 +137,7 @@ public class RemoteTestNG extends TestNG private static void p(String s) { if (isVerbose()) { - System.out.println("[RemoteTestNG] " + s); + LOGGER.info("[RemoteTestNG] " + s); } } @@ -191,11 +194,11 @@ public class RemoteTestNG extends TestNG super.run(); } else { - System.err.println("No test suite found. Nothing to run"); + LOGGER.error("No test suite found. 
Nothing to run"); } } catch (Throwable cause) { - cause.printStackTrace(System.err); + LOGGER.error("", cause); } finally { msh.shutDown(); diff --git a/integration-tests/src/test/java/io/druid/tests/indexer/AbstractIndexerTest.java b/integration-tests/src/test/java/io/druid/tests/indexer/AbstractIndexerTest.java index cb6a2f435db..82b83fb983c 100644 --- a/integration-tests/src/test/java/io/druid/tests/indexer/AbstractIndexerTest.java +++ b/integration-tests/src/test/java/io/druid/tests/indexer/AbstractIndexerTest.java @@ -23,6 +23,7 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.inject.Inject; import io.druid.guice.annotations.Json; import io.druid.guice.annotations.Smile; +import io.druid.java.util.common.Intervals; import io.druid.testing.clients.CoordinatorResourceTestClient; import io.druid.testing.clients.OverlordResourceTestClient; import io.druid.testing.utils.RetryUtil; @@ -32,9 +33,9 @@ import org.joda.time.Interval; import java.io.IOException; import java.io.InputStream; -import java.util.concurrent.Callable; import java.util.ArrayList; import java.util.Collections; +import java.util.concurrent.Callable; public abstract class AbstractIndexerTest { @@ -72,7 +73,7 @@ public abstract class AbstractIndexerTest // Wait for any existing index tasks to complete before disabling the datasource otherwise // realtime tasks can get stuck waiting for handoff. https://github.com/druid-io/druid/issues/1729 waitForAllTasksToComplete(); - Interval interval = new Interval(start + "/" + end); + Interval interval = Intervals.of(start + "/" + end); coordinator.unloadSegmentsForDataSource(dataSource, interval); RetryUtil.retryUntilFalse( new Callable() diff --git a/integration-tests/src/test/java/io/druid/tests/indexer/ITRealtimeIndexTaskTest.java b/integration-tests/src/test/java/io/druid/tests/indexer/ITRealtimeIndexTaskTest.java index 38313d15fa3..e637d1c8bba 100644 --- a/integration-tests/src/test/java/io/druid/tests/indexer/ITRealtimeIndexTaskTest.java +++ b/integration-tests/src/test/java/io/druid/tests/indexer/ITRealtimeIndexTaskTest.java @@ -25,6 +25,7 @@ import com.google.inject.Inject; import com.metamx.http.client.HttpClient; import io.druid.curator.discovery.ServerDiscoveryFactory; import io.druid.curator.discovery.ServerDiscoverySelector; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.ISE; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.logger.Logger; @@ -36,7 +37,6 @@ import io.druid.testing.utils.RetryUtil; import io.druid.testing.utils.ServerDiscoveryUtil; import org.apache.commons.io.IOUtils; import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; import org.joda.time.format.DateTimeFormat; import org.joda.time.format.DateTimeFormatter; import org.testng.annotations.Guice; @@ -102,7 +102,7 @@ public class ITRealtimeIndexTaskTest extends AbstractIndexerTest // the task will run for 3 minutes and then shutdown itself String task = setShutOffTime( getTaskAsString(REALTIME_TASK_RESOURCE), - new DateTime(System.currentTimeMillis() + TimeUnit.MINUTES.toMillis(3)) + DateTimes.utc(System.currentTimeMillis() + TimeUnit.MINUTES.toMillis(3)) ); LOG.info("indexerSpec: [%s]\n", task); taskID = indexer.submitTask(task); @@ -183,7 +183,6 @@ public class ITRealtimeIndexTaskTest extends AbstractIndexerTest public void postEvents() throws Exception { - DateTimeZone zone = DateTimeZone.forID("UTC"); final ServerDiscoverySelector eventReceiverSelector = factory.createSelector(EVENT_RECEIVER_SERVICE_NAME); 
eventReceiverSelector.start(); BufferedReader reader = null; @@ -212,7 +211,7 @@ public class ITRealtimeIndexTaskTest extends AbstractIndexerTest ); // there are 22 lines in the file int i = 1; - DateTime dt = new DateTime(zone); // timestamp used for sending each event + DateTime dt = DateTimes.nowUtc(); // timestamp used for sending each event dtFirst = dt; // timestamp of 1st event dtLast = dt; // timestamp of last event String line; @@ -244,7 +243,7 @@ public class ITRealtimeIndexTaskTest extends AbstractIndexerTest } catch (InterruptedException ex) { /* nothing */ } dtLast = dt; - dt = new DateTime(zone); + dt = DateTimes.nowUtc(); i++; } } diff --git a/integration-tests/src/test/java/io/druid/tests/indexer/ITUnionQueryTest.java b/integration-tests/src/test/java/io/druid/tests/indexer/ITUnionQueryTest.java index f83b2f44179..645f66dddae 100644 --- a/integration-tests/src/test/java/io/druid/tests/indexer/ITUnionQueryTest.java +++ b/integration-tests/src/test/java/io/druid/tests/indexer/ITUnionQueryTest.java @@ -25,6 +25,7 @@ import com.google.inject.Inject; import com.metamx.http.client.HttpClient; import io.druid.curator.discovery.ServerDiscoveryFactory; import io.druid.curator.discovery.ServerDiscoverySelector; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.logger.Logger; import io.druid.testing.IntegrationTestingConfig; import io.druid.testing.clients.EventReceiverFirehoseTestClient; @@ -69,7 +70,7 @@ public class ITUnionQueryTest extends AbstractIndexerTest // Load 4 datasources with same dimensions String task = setShutOffTime( getTaskAsString(UNION_TASK_RESOURCE), - new DateTime(System.currentTimeMillis() + TimeUnit.MINUTES.toMillis(3)) + DateTimes.utc(System.currentTimeMillis() + TimeUnit.MINUTES.toMillis(3)) ); List taskIDs = Lists.newArrayList(); for (int i = 0; i < numTasks; i++) { diff --git a/java-util/src/main/java/io/druid/java/util/common/DateTimes.java b/java-util/src/main/java/io/druid/java/util/common/DateTimes.java new file mode 100644 index 00000000000..149e34ee397 --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/DateTimes.java @@ -0,0 +1,59 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package io.druid.java.util.common; + +import org.joda.time.DateTime; +import org.joda.time.chrono.ISOChronology; + +public final class DateTimes +{ + public static final DateTime EPOCH = utc(0); + public static final DateTime MAX = utc(JodaUtils.MAX_INSTANT); + public static final DateTime MIN = utc(JodaUtils.MIN_INSTANT); + + public static DateTime utc(long instant) + { + return new DateTime(instant, ISOChronology.getInstanceUTC()); + } + + public static DateTime of(String instant) + { + return new DateTime(instant, ISOChronology.getInstanceUTC()); + } + + public static DateTime nowUtc() + { + return DateTime.now(ISOChronology.getInstanceUTC()); + } + + public static DateTime max(DateTime dt1, DateTime dt2) + { + return dt1.compareTo(dt2) >= 0 ? dt1 : dt2; + } + + public static DateTime min(DateTime dt1, DateTime dt2) + { + return dt1.compareTo(dt2) < 0 ? dt1 : dt2; + } + + private DateTimes() + { + } +} diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/StringIntervalFunction.java b/java-util/src/main/java/io/druid/java/util/common/Intervals.java similarity index 57% rename from indexing-hadoop/src/main/java/io/druid/indexer/StringIntervalFunction.java rename to java-util/src/main/java/io/druid/java/util/common/Intervals.java index 72a7828ddff..6a1e8b9c8b5 100644 --- a/indexing-hadoop/src/main/java/io/druid/indexer/StringIntervalFunction.java +++ b/java-util/src/main/java/io/druid/java/util/common/Intervals.java @@ -17,18 +17,28 @@ * under the License. */ -package io.druid.indexer; +package io.druid.java.util.common; -import com.google.common.base.Function; +import com.google.common.collect.ImmutableList; import org.joda.time.Interval; +import org.joda.time.chrono.ISOChronology; -/** -*/ -class StringIntervalFunction implements Function +public final class Intervals { - @Override - public Interval apply(String input) + public static final Interval ETERNITY = utc(JodaUtils.MIN_INSTANT, JodaUtils.MAX_INSTANT); + public static final ImmutableList ONLY_ETERNITY = ImmutableList.of(ETERNITY); + + public static Interval utc(long startInstant, long endInstant) + { + return new Interval(startInstant, endInstant, ISOChronology.getInstanceUTC()); + } + + public static Interval of(String interval) + { + return new Interval(interval, ISOChronology.getInstanceUTC()); + } + + private Intervals() { - return new Interval(input); } } diff --git a/common/src/main/java/io/druid/common/utils/JodaUtils.java b/java-util/src/main/java/io/druid/java/util/common/JodaUtils.java similarity index 97% rename from common/src/main/java/io/druid/common/utils/JodaUtils.java rename to java-util/src/main/java/io/druid/java/util/common/JodaUtils.java index bc46320774d..bda0f98ba05 100644 --- a/common/src/main/java/io/druid/common/utils/JodaUtils.java +++ b/java-util/src/main/java/io/druid/java/util/common/JodaUtils.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package io.druid.common.utils; +package io.druid.java.util.common; import com.google.common.base.Predicate; import com.google.common.collect.Iterables; @@ -38,7 +38,6 @@ public class JodaUtils // limit intervals such that duration millis fits in a long public static final long MAX_INSTANT = Long.MAX_VALUE / 2; public static final long MIN_INSTANT = Long.MIN_VALUE / 2; - public static final Interval ETERNITY = new Interval(MIN_INSTANT, MAX_INSTANT); public static ArrayList condenseIntervals(Iterable intervals) { diff --git a/java-util/src/main/java/io/druid/java/util/common/granularity/AllGranularity.java b/java-util/src/main/java/io/druid/java/util/common/granularity/AllGranularity.java index 59a84ae6ed1..946bb5a2a50 100644 --- a/java-util/src/main/java/io/druid/java/util/common/granularity/AllGranularity.java +++ b/java-util/src/main/java/io/druid/java/util/common/granularity/AllGranularity.java @@ -20,6 +20,7 @@ package io.druid.java.util.common.granularity; import com.google.common.collect.ImmutableList; +import io.druid.java.util.common.DateTimes; import org.joda.time.DateTime; import org.joda.time.Interval; import org.joda.time.format.DateTimeFormatter; @@ -29,15 +30,6 @@ import org.joda.time.format.DateTimeFormatter; */ public class AllGranularity extends Granularity { - // These constants are from JodaUtils in druid-common. - // Creates circular dependency. - // Will be nice to move JodaUtils here sometime - public static final long MAX_INSTANT = Long.MAX_VALUE / 2; - public static final long MIN_INSTANT = Long.MIN_VALUE / 2; - - private final DateTime maxDateTime = new DateTime(MAX_INSTANT); - private final DateTime minDateTime = new DateTime(MIN_INSTANT); - /** * This constructor is public b/c it is serialized and deserialized * based on type in GranularityModule @@ -53,7 +45,7 @@ public class AllGranularity extends Granularity @Override public DateTime increment(DateTime time) { - return maxDateTime; + return DateTimes.MAX; } @Override @@ -65,7 +57,7 @@ public class AllGranularity extends Granularity @Override public DateTime bucketStart(DateTime time) { - return minDateTime; + return DateTimes.MIN; } @Override diff --git a/java-util/src/main/java/io/druid/java/util/common/granularity/DurationGranularity.java b/java-util/src/main/java/io/druid/java/util/common/granularity/DurationGranularity.java index f8f556c2db6..419280be151 100644 --- a/java-util/src/main/java/io/druid/java/util/common/granularity/DurationGranularity.java +++ b/java-util/src/main/java/io/druid/java/util/common/granularity/DurationGranularity.java @@ -23,6 +23,7 @@ import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Preconditions; import com.google.common.primitives.Longs; +import io.druid.java.util.common.DateTimes; import org.joda.time.DateTime; import org.joda.time.format.DateTimeFormatter; @@ -61,7 +62,7 @@ public class DurationGranularity extends Granularity @JsonProperty("origin") public DateTime getOrigin() { - return new DateTime(origin); + return DateTimes.utc(origin); } public long getOriginMillis() @@ -78,13 +79,13 @@ public class DurationGranularity extends Granularity @Override public DateTime increment(DateTime time) { - return new DateTime(time.getMillis() + getDurationMillis()); + return time.plus(getDuration()); } @Override public DateTime decrement(DateTime time) { - return new DateTime(time.getMillis() - getDurationMillis()); + return time.minus(getDuration()); } @Override @@ -96,7 +97,7 @@ public class 
DurationGranularity extends Granularity if (offset < 0) { offset += duration; } - return new DateTime(t - offset); + return new DateTime(t - offset, time.getChronology()); } @Override diff --git a/java-util/src/main/java/io/druid/java/util/common/granularity/Granularity.java b/java-util/src/main/java/io/druid/java/util/common/granularity/Granularity.java index 1ec439dff89..9a76af789c1 100644 --- a/java-util/src/main/java/io/druid/java/util/common/granularity/Granularity.java +++ b/java-util/src/main/java/io/druid/java/util/common/granularity/Granularity.java @@ -23,6 +23,7 @@ import com.fasterxml.jackson.annotation.JsonCreator; import com.google.common.collect.Lists; import com.google.common.primitives.Longs; import io.druid.java.util.common.Cacheable; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.IAE; import io.druid.java.util.common.StringUtils; import org.joda.time.DateTime; @@ -30,8 +31,6 @@ import org.joda.time.DateTimeZone; import org.joda.time.Interval; import org.joda.time.format.DateTimeFormatter; -import java.util.Collections; -import java.util.Comparator; import java.util.Iterator; import java.util.List; import java.util.NoSuchElementException; @@ -79,7 +78,6 @@ public abstract class Granularity implements Cacheable public static List granularitiesFinerThan(final Granularity gran0) { - final DateTime epoch = new DateTime(0); final List retVal = Lists.newArrayList(); final DateTime origin = (gran0 instanceof PeriodGranularity) ? ((PeriodGranularity) gran0).getOrigin() : null; final DateTimeZone tz = (gran0 instanceof PeriodGranularity) ? ((PeriodGranularity) gran0).getTimeZone() : null; @@ -93,21 +91,17 @@ public abstract class Granularity implements Cacheable continue; } final Granularity segmentGranularity = gran.create(origin, tz); - if (segmentGranularity.bucket(epoch).toDurationMillis() <= gran0.bucket(epoch).toDurationMillis()) { + final long segmentGranularityDurationMillis = segmentGranularity.bucket(DateTimes.EPOCH).toDurationMillis(); + final long gran0DurationMillis = gran0.bucket(DateTimes.EPOCH).toDurationMillis(); + if (segmentGranularityDurationMillis <= gran0DurationMillis) { retVal.add(segmentGranularity); } } - Collections.sort( - retVal, - new Comparator() - { - @Override - public int compare(Granularity g1, Granularity g2) - { - return Longs.compare(g2.bucket(epoch).toDurationMillis(), g1.bucket(epoch).toDurationMillis()); - } - } - ); + retVal.sort((g1, g2) -> { + long duration1 = g2.bucket(DateTimes.EPOCH).toDurationMillis(); + long duration2 = g1.bucket(DateTimes.EPOCH).toDurationMillis(); + return Longs.compare(duration1, duration2); + }); return retVal; } @@ -128,7 +122,7 @@ public abstract class Granularity implements Cacheable public DateTime toDateTime(long offset) { - return new DateTime(offset, DateTimeZone.UTC); + return DateTimes.utc(offset); } public DateTime toDate(String filePath) diff --git a/java-util/src/main/java/io/druid/java/util/common/granularity/GranularityType.java b/java-util/src/main/java/io/druid/java/util/common/granularity/GranularityType.java index a74efb7ec3d..dceaa4a1998 100644 --- a/java-util/src/main/java/io/druid/java/util/common/granularity/GranularityType.java +++ b/java-util/src/main/java/io/druid/java/util/common/granularity/GranularityType.java @@ -23,6 +23,7 @@ import io.druid.java.util.common.IAE; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import org.joda.time.Period; +import org.joda.time.chrono.ISOChronology; /** * Only to create a mapping of the granularity and 
all the supported file patterns @@ -159,7 +160,8 @@ public enum GranularityType dateValuePositions >= 4 ? vals[4] : 0, dateValuePositions >= 5 ? vals[5] : 0, dateValuePositions >= 6 ? vals[6] : 0, - 0 + 0, + ISOChronology.getInstanceUTC() ); } diff --git a/java-util/src/main/java/io/druid/java/util/common/granularity/NoneGranularity.java b/java-util/src/main/java/io/druid/java/util/common/granularity/NoneGranularity.java index 1fff1a42bab..ba4b24d4254 100644 --- a/java-util/src/main/java/io/druid/java/util/common/granularity/NoneGranularity.java +++ b/java-util/src/main/java/io/druid/java/util/common/granularity/NoneGranularity.java @@ -42,23 +42,19 @@ public class NoneGranularity extends Granularity @Override public DateTime increment(DateTime time) { - return new DateTime(time.getMillis() + 1); + return time.plus(1); } @Override public DateTime decrement(DateTime time) { - return new DateTime(time.getMillis() - 1); + return time.minus(1); } @Override public DateTime bucketStart(DateTime time) { - if (time == null) { - return null; - } - - return new DateTime(time.getMillis()); + return time; } @Override diff --git a/java-util/src/main/java/io/druid/java/util/common/granularity/PeriodGranularity.java b/java-util/src/main/java/io/druid/java/util/common/granularity/PeriodGranularity.java index 4708e855b6a..2df9d6c98a7 100644 --- a/java-util/src/main/java/io/druid/java/util/common/granularity/PeriodGranularity.java +++ b/java-util/src/main/java/io/druid/java/util/common/granularity/PeriodGranularity.java @@ -27,6 +27,7 @@ import com.fasterxml.jackson.databind.JsonSerializable; import com.fasterxml.jackson.databind.SerializerProvider; import com.fasterxml.jackson.databind.jsontype.TypeSerializer; import com.google.common.base.Preconditions; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.IAE; import io.druid.java.util.common.StringUtils; import org.joda.time.Chronology; @@ -37,6 +38,7 @@ import org.joda.time.chrono.ISOChronology; import org.joda.time.format.DateTimeFormat; import org.joda.time.format.DateTimeFormatter; +import javax.annotation.Nullable; import java.io.IOException; /** @@ -84,9 +86,10 @@ public class PeriodGranularity extends Granularity implements JsonSerializable } @JsonProperty("origin") + @Nullable public DateTime getOrigin() { - return hasOrigin ? new DateTime(origin) : null; + return hasOrigin ? DateTimes.utc(origin) : null; } // Used only for Segments. 
Not for Queries diff --git a/java-util/src/main/java/io/druid/java/util/common/parsers/TimestampParser.java b/java-util/src/main/java/io/druid/java/util/common/parsers/TimestampParser.java index fdff821e288..427f7ba3694 100644 --- a/java-util/src/main/java/io/druid/java/util/common/parsers/TimestampParser.java +++ b/java-util/src/main/java/io/druid/java/util/common/parsers/TimestampParser.java @@ -21,6 +21,7 @@ package io.druid.java.util.common.parsers; import com.google.common.base.Function; import com.google.common.base.Preconditions; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.IAE; import org.joda.time.DateTime; import org.joda.time.format.DateTimeFormat; @@ -29,6 +30,8 @@ import org.joda.time.format.DateTimeFormatterBuilder; import org.joda.time.format.DateTimeParser; import org.joda.time.format.ISODateTimeFormat; +import java.util.concurrent.TimeUnit; + public class TimestampParser { public static Function createTimestampParser( @@ -50,7 +53,7 @@ public class TimestampParser } } - return new DateTime(Long.parseLong(input)); + return DateTimes.utc(Long.parseLong(input)); } }; } else if (format.equalsIgnoreCase("iso")) { @@ -60,7 +63,7 @@ public class TimestampParser public DateTime apply(String input) { Preconditions.checkArgument(input != null && !input.isEmpty(), "null timestamp"); - return new DateTime(ParserUtils.stripQuotes(input)); + return DateTimes.of(ParserUtils.stripQuotes(input)); } }; } else if (format.equalsIgnoreCase("posix") @@ -118,7 +121,7 @@ public class TimestampParser @Override public DateTime apply(Number input) { - return new DateTime(input.longValue() * 1000); + return DateTimes.utc(TimeUnit.SECONDS.toMillis(input.longValue())); } }; } else if (format.equalsIgnoreCase("nano")) { @@ -127,7 +130,7 @@ public class TimestampParser @Override public DateTime apply(Number input) { - return new DateTime(input.longValue() / 1000000L); + return DateTimes.utc(TimeUnit.NANOSECONDS.toMillis(input.longValue())); } }; } else { @@ -136,7 +139,7 @@ public class TimestampParser @Override public DateTime apply(Number input) { - return new DateTime(input.longValue()); + return DateTimes.utc(input.longValue()); } }; } diff --git a/java-util/src/test/java/io/druid/java/util/common/GranularityTest.java b/java-util/src/test/java/io/druid/java/util/common/GranularityTest.java index f801215106f..1a1be9f6566 100644 --- a/java-util/src/test/java/io/druid/java/util/common/GranularityTest.java +++ b/java-util/src/test/java/io/druid/java/util/common/GranularityTest.java @@ -28,6 +28,7 @@ import org.joda.time.DateTimeZone; import org.joda.time.IllegalFieldValueException; import org.joda.time.Interval; import org.joda.time.Period; +import org.joda.time.chrono.ISOChronology; import org.junit.Assert; import org.junit.Test; @@ -51,9 +52,9 @@ public class GranularityTest public void testHiveFormat() { PathDate[] secondChecks = { - new PathDate(new DateTime(2011, 3, 15, 20, 50, 43, 0), null, "dt=2011-03-15-20-50-43/Test0"), - new PathDate(new DateTime(2011, 3, 15, 20, 50, 43, 0), null, "/dt=2011-03-15-20-50-43/Test0"), - new PathDate(new DateTime(2011, 3, 15, 20, 50, 43, 0), null, "valid/dt=2011-03-15-20-50-43/Test1"), + new PathDate(new DateTime(2011, 3, 15, 20, 50, 43, 0, ISOChronology.getInstanceUTC()), null, "dt=2011-03-15-20-50-43/Test0"), + new PathDate(new DateTime(2011, 3, 15, 20, 50, 43, 0, ISOChronology.getInstanceUTC()), null, "/dt=2011-03-15-20-50-43/Test0"), + new PathDate(new DateTime(2011, 3, 15, 20, 50, 43, 0, ISOChronology.getInstanceUTC()), null, 
"valid/dt=2011-03-15-20-50-43/Test1"), new PathDate(null, null, "valid/dt=2011-03-15-20-50/Test2"), new PathDate(null, null, "valid/dt=2011-03-15-20/Test3"), new PathDate(null, null, "valid/dt=2011-03-15/Test4"), @@ -75,9 +76,9 @@ public class GranularityTest public void testSecondToDate() { PathDate[] secondChecks = { - new PathDate(new DateTime(2011, 3, 15, 20, 50, 43, 0), null, "y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), - new PathDate(new DateTime(2011, 3, 15, 20, 50, 43, 0), null, "/y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), - new PathDate(new DateTime(2011, 3, 15, 20, 50, 43, 0), null, "valid/y=2011/m=03/d=15/H=20/M=50/S=43/Test1"), + new PathDate(new DateTime(2011, 3, 15, 20, 50, 43, 0, ISOChronology.getInstanceUTC()), null, "y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), + new PathDate(new DateTime(2011, 3, 15, 20, 50, 43, 0, ISOChronology.getInstanceUTC()), null, "/y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), + new PathDate(new DateTime(2011, 3, 15, 20, 50, 43, 0, ISOChronology.getInstanceUTC()), null, "valid/y=2011/m=03/d=15/H=20/M=50/S=43/Test1"), new PathDate(null, null, "valid/y=2011/m=03/d=15/H=20/M=50/Test2"), new PathDate(null, null, "valid/y=2011/m=03/d=15/H=20/Test3"), new PathDate(null, null, "valid/y=2011/m=03/d=15/Test4"), @@ -102,10 +103,10 @@ public class GranularityTest { PathDate[] minuteChecks = { - new PathDate(new DateTime(2011, 3, 15, 20, 50, 0, 0), null, "y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), - new PathDate(new DateTime(2011, 3, 15, 20, 50, 0, 0), null, "/y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), - new PathDate(new DateTime(2011, 3, 15, 20, 50, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/M=50/S=43/Test1"), - new PathDate(new DateTime(2011, 3, 15, 20, 50, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/M=50/Test2"), + new PathDate(new DateTime(2011, 3, 15, 20, 50, 0, 0, ISOChronology.getInstanceUTC()), null, "y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), + new PathDate(new DateTime(2011, 3, 15, 20, 50, 0, 0, ISOChronology.getInstanceUTC()), null, "/y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), + new PathDate(new DateTime(2011, 3, 15, 20, 50, 0, 0, ISOChronology.getInstanceUTC()), null, "valid/y=2011/m=03/d=15/H=20/M=50/S=43/Test1"), + new PathDate(new DateTime(2011, 3, 15, 20, 50, 0, 0, ISOChronology.getInstanceUTC()), null, "valid/y=2011/m=03/d=15/H=20/M=50/Test2"), new PathDate(null, null, "valid/y=2011/m=03/d=15/H=20/Test3"), new PathDate(null, null, "valid/y=2011/m=03/d=15/Test4"), new PathDate(null, null, "valid/y=2011/m=03/Test5"), @@ -114,7 +115,7 @@ public class GranularityTest new PathDate(null, null, "null/m=10/y=2011/d=23/Test8"), new PathDate(null, null, "null/Test9"), new PathDate(null, null, ""), //Test10 Intentionally empty. 
- new PathDate(new DateTime(2011, 10, 20, 20, 42, 0, 0), null, "error/y=2011/m=10/d=20/H=20/M=42/S=72/Test11"), + new PathDate(new DateTime(2011, 10, 20, 20, 42, 0, 0, ISOChronology.getInstanceUTC()), null, "error/y=2011/m=10/d=20/H=20/M=42/S=72/Test11"), new PathDate(null, IllegalFieldValueException.class, "error/y=2011/m=10/d=20/H=20/M=90/S=24/Test12"), new PathDate(null, IllegalFieldValueException.class, "error/y=2011/m=10/d=20/H=42/M=42/S=24/Test13"), new PathDate(null, IllegalFieldValueException.class, "error/y=2011/m=10/d=33/H=20/M=42/S=24/Test14"), @@ -129,18 +130,18 @@ public class GranularityTest { PathDate[] minuteChecks = { - new PathDate(new DateTime(2011, 3, 15, 20, 45, 0, 0), null, "y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), - new PathDate(new DateTime(2011, 3, 15, 20, 45, 0, 0), null, "/y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), - new PathDate(new DateTime(2011, 3, 15, 20, 45, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/M=50/S=43/Test1"), - new PathDate(new DateTime(2011, 3, 15, 20, 45, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/M=50/Test2"), - new PathDate(new DateTime(2011, 3, 15, 20, 00, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/M=00/Test2a"), - new PathDate(new DateTime(2011, 3, 15, 20, 00, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/M=14/Test2b"), - new PathDate(new DateTime(2011, 3, 15, 20, 15, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/M=15/Test2c"), - new PathDate(new DateTime(2011, 3, 15, 20, 15, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/M=29/Test2d"), - new PathDate(new DateTime(2011, 3, 15, 20, 30, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/M=30/Test2e"), - new PathDate(new DateTime(2011, 3, 15, 20, 30, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/M=44/Test2f"), - new PathDate(new DateTime(2011, 3, 15, 20, 45, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/M=45/Test2g"), - new PathDate(new DateTime(2011, 3, 15, 20, 45, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/M=59/Test2h"), + new PathDate(new DateTime(2011, 3, 15, 20, 45, 0, 0, ISOChronology.getInstanceUTC()), null, "y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), + new PathDate(new DateTime(2011, 3, 15, 20, 45, 0, 0, ISOChronology.getInstanceUTC()), null, "/y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), + new PathDate(new DateTime(2011, 3, 15, 20, 45, 0, 0, ISOChronology.getInstanceUTC()), null, "valid/y=2011/m=03/d=15/H=20/M=50/S=43/Test1"), + new PathDate(new DateTime(2011, 3, 15, 20, 45, 0, 0, ISOChronology.getInstanceUTC()), null, "valid/y=2011/m=03/d=15/H=20/M=50/Test2"), + new PathDate(new DateTime(2011, 3, 15, 20, 00, 0, 0, ISOChronology.getInstanceUTC()), null, "valid/y=2011/m=03/d=15/H=20/M=00/Test2a"), + new PathDate(new DateTime(2011, 3, 15, 20, 00, 0, 0, ISOChronology.getInstanceUTC()), null, "valid/y=2011/m=03/d=15/H=20/M=14/Test2b"), + new PathDate(new DateTime(2011, 3, 15, 20, 15, 0, 0, ISOChronology.getInstanceUTC()), null, "valid/y=2011/m=03/d=15/H=20/M=15/Test2c"), + new PathDate(new DateTime(2011, 3, 15, 20, 15, 0, 0, ISOChronology.getInstanceUTC()), null, "valid/y=2011/m=03/d=15/H=20/M=29/Test2d"), + new PathDate(new DateTime(2011, 3, 15, 20, 30, 0, 0, ISOChronology.getInstanceUTC()), null, "valid/y=2011/m=03/d=15/H=20/M=30/Test2e"), + new PathDate(new DateTime(2011, 3, 15, 20, 30, 0, 0, ISOChronology.getInstanceUTC()), null, "valid/y=2011/m=03/d=15/H=20/M=44/Test2f"), + new PathDate(new DateTime(2011, 3, 15, 20, 45, 0, 0, ISOChronology.getInstanceUTC()), null, "valid/y=2011/m=03/d=15/H=20/M=45/Test2g"), + new PathDate(new DateTime(2011, 3, 15, 20, 45, 0, 0, ISOChronology.getInstanceUTC()), null, 
"valid/y=2011/m=03/d=15/H=20/M=59/Test2h"), new PathDate(null, null, "valid/y=2011/m=03/d=15/H=20/Test3"), new PathDate(null, null, "valid/y=2011/m=03/d=15/Test4"), new PathDate(null, null, "valid/y=2011/m=03/Test5"), @@ -149,7 +150,7 @@ public class GranularityTest new PathDate(null, null, "null/m=10/y=2011/d=23/Test8"), new PathDate(null, null, "null/Test9"), new PathDate(null, null, ""), //Test10 Intentionally empty. - new PathDate(new DateTime(2011, 10, 20, 20, 30, 0, 0), null, "error/y=2011/m=10/d=20/H=20/M=42/S=72/Test11"), + new PathDate(new DateTime(2011, 10, 20, 20, 30, 0, 0, ISOChronology.getInstanceUTC()), null, "error/y=2011/m=10/d=20/H=20/M=42/S=72/Test11"), new PathDate(null, IllegalFieldValueException.class, "error/y=2011/m=10/d=20/H=20/M=90/S=24/Test12"), new PathDate(null, IllegalFieldValueException.class, "error/y=2011/m=10/d=20/H=42/M=42/S=24/Test13"), new PathDate(null, IllegalFieldValueException.class, "error/y=2011/m=10/d=33/H=20/M=42/S=24/Test14"), @@ -163,11 +164,11 @@ public class GranularityTest public void testHourToDate() { PathDate[] hourChecks = { - new PathDate(new DateTime(2011, 3, 15, 20, 0, 0, 0), null, "y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), - new PathDate(new DateTime(2011, 3, 15, 20, 0, 0, 0), null, "/y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), - new PathDate(new DateTime(2011, 3, 15, 20, 0, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/M=50/S=43/Test1"), - new PathDate(new DateTime(2011, 3, 15, 20, 0, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/M=50/Test2"), - new PathDate(new DateTime(2011, 3, 15, 20, 0, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/Test3"), + new PathDate(new DateTime(2011, 3, 15, 20, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), + new PathDate(new DateTime(2011, 3, 15, 20, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "/y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), + new PathDate(new DateTime(2011, 3, 15, 20, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "valid/y=2011/m=03/d=15/H=20/M=50/S=43/Test1"), + new PathDate(new DateTime(2011, 3, 15, 20, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "valid/y=2011/m=03/d=15/H=20/M=50/Test2"), + new PathDate(new DateTime(2011, 3, 15, 20, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "valid/y=2011/m=03/d=15/H=20/Test3"), new PathDate(null, null, "valid/y=2011/m=03/d=15/Test4"), new PathDate(null, null, "valid/y=2011/m=03/Test5"), new PathDate(null, null, "valid/y=2011/Test6"), @@ -175,8 +176,8 @@ public class GranularityTest new PathDate(null, null, "null/m=10/y=2011/d=23/Test8"), new PathDate(null, null, "null/Test9"), new PathDate(null, null, ""), //Test10 Intentionally empty. 
- new PathDate(new DateTime(2011, 10, 20, 20, 0, 0, 0), null, "error/y=2011/m=10/d=20/H=20/M=42/S=72/Test11"), - new PathDate(new DateTime(2011, 10, 20, 20, 0, 0, 0), null, "error/y=2011/m=10/d=20/H=20/M=90/S=24/Test12"), + new PathDate(new DateTime(2011, 10, 20, 20, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "error/y=2011/m=10/d=20/H=20/M=42/S=72/Test11"), + new PathDate(new DateTime(2011, 10, 20, 20, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "error/y=2011/m=10/d=20/H=20/M=90/S=24/Test12"), new PathDate(null, IllegalFieldValueException.class, "error/y=2011/m=10/d=20/H=42/M=42/S=24/Test13"), new PathDate(null, IllegalFieldValueException.class, "error/y=2011/m=10/d=33/H=20/M=42/S=24/Test14"), new PathDate(null, IllegalFieldValueException.class, "error/y=2011/m=13/d=20/H=20/M=42/S=24/Test15") @@ -189,11 +190,11 @@ public class GranularityTest public void testSixHourToDate() { PathDate[] hourChecks = { - new PathDate(new DateTime(2011, 3, 15, 18, 0, 0, 0), null, "y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), - new PathDate(new DateTime(2011, 3, 15, 18, 0, 0, 0), null, "/y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), - new PathDate(new DateTime(2011, 3, 15, 18, 0, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/M=50/S=43/Test1"), - new PathDate(new DateTime(2011, 3, 15, 18, 0, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/M=50/Test2"), - new PathDate(new DateTime(2011, 3, 15, 18, 0, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/Test3"), + new PathDate(new DateTime(2011, 3, 15, 18, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), + new PathDate(new DateTime(2011, 3, 15, 18, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "/y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), + new PathDate(new DateTime(2011, 3, 15, 18, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "valid/y=2011/m=03/d=15/H=20/M=50/S=43/Test1"), + new PathDate(new DateTime(2011, 3, 15, 18, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "valid/y=2011/m=03/d=15/H=20/M=50/Test2"), + new PathDate(new DateTime(2011, 3, 15, 18, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "valid/y=2011/m=03/d=15/H=20/Test3"), new PathDate(null, null, "valid/y=2011/m=03/d=15/Test4"), new PathDate(null, null, "valid/y=2011/m=03/Test5"), new PathDate(null, null, "valid/y=2011/Test6"), @@ -201,14 +202,14 @@ public class GranularityTest new PathDate(null, null, "null/m=10/y=2011/d=23/Test8"), new PathDate(null, null, "null/Test9"), new PathDate(null, null, ""), //Test10 Intentionally empty. 
- new PathDate(new DateTime(2011, 10, 20, 18, 0, 0, 0), null, "error/y=2011/m=10/d=20/H=20/M=42/S=72/Test11"), - new PathDate(new DateTime(2011, 10, 20, 18, 0, 0, 0), null, "error/y=2011/m=10/d=20/H=20/M=90/S=24/Test12"), - new PathDate(new DateTime(2011, 10, 20, 0, 0, 0, 0), null, "error/y=2011/m=10/d=20/H=00/M=90/S=24/Test12"), - new PathDate(new DateTime(2011, 10, 20, 0, 0, 0, 0), null, "error/y=2011/m=10/d=20/H=02/M=90/S=24/Test12"), - new PathDate(new DateTime(2011, 10, 20, 6, 0, 0, 0), null, "error/y=2011/m=10/d=20/H=06/M=90/S=24/Test12"), - new PathDate(new DateTime(2011, 10, 20, 6, 0, 0, 0), null, "error/y=2011/m=10/d=20/H=11/M=90/S=24/Test12"), - new PathDate(new DateTime(2011, 10, 20, 12, 0, 0, 0), null, "error/y=2011/m=10/d=20/H=12/M=90/S=24/Test12"), - new PathDate(new DateTime(2011, 10, 20, 12, 0, 0, 0), null, "error/y=2011/m=10/d=20/H=13/M=90/S=24/Test12"), + new PathDate(new DateTime(2011, 10, 20, 18, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "error/y=2011/m=10/d=20/H=20/M=42/S=72/Test11"), + new PathDate(new DateTime(2011, 10, 20, 18, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "error/y=2011/m=10/d=20/H=20/M=90/S=24/Test12"), + new PathDate(new DateTime(2011, 10, 20, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "error/y=2011/m=10/d=20/H=00/M=90/S=24/Test12"), + new PathDate(new DateTime(2011, 10, 20, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "error/y=2011/m=10/d=20/H=02/M=90/S=24/Test12"), + new PathDate(new DateTime(2011, 10, 20, 6, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "error/y=2011/m=10/d=20/H=06/M=90/S=24/Test12"), + new PathDate(new DateTime(2011, 10, 20, 6, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "error/y=2011/m=10/d=20/H=11/M=90/S=24/Test12"), + new PathDate(new DateTime(2011, 10, 20, 12, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "error/y=2011/m=10/d=20/H=12/M=90/S=24/Test12"), + new PathDate(new DateTime(2011, 10, 20, 12, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "error/y=2011/m=10/d=20/H=13/M=90/S=24/Test12"), new PathDate(null, IllegalFieldValueException.class, "error/y=2011/m=10/d=20/H=42/M=42/S=24/Test13"), new PathDate(null, IllegalFieldValueException.class, "error/y=2011/m=10/d=33/H=20/M=42/S=24/Test14"), new PathDate(null, IllegalFieldValueException.class, "error/y=2011/m=13/d=20/H=20/M=42/S=24/Test15") @@ -221,21 +222,21 @@ public class GranularityTest public void testDayToDate() { PathDate[] dayChecks = { - new PathDate(new DateTime(2011, 3, 15, 0, 0, 0, 0), null, "y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), - new PathDate(new DateTime(2011, 3, 15, 0, 0, 0, 0), null, "/y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), - new PathDate(new DateTime(2011, 3, 15, 0, 0, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/M=50/S=43/Test1"), - new PathDate(new DateTime(2011, 3, 15, 0, 0, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/M=50/Test2"), - new PathDate(new DateTime(2011, 3, 15, 0, 0, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/Test3"), - new PathDate(new DateTime(2011, 3, 15, 0, 0, 0, 0), null, "valid/y=2011/m=03/d=15/Test4"), + new PathDate(new DateTime(2011, 3, 15, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), + new PathDate(new DateTime(2011, 3, 15, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "/y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), + new PathDate(new DateTime(2011, 3, 15, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "valid/y=2011/m=03/d=15/H=20/M=50/S=43/Test1"), + new PathDate(new DateTime(2011, 3, 15, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), null, 
"valid/y=2011/m=03/d=15/H=20/M=50/Test2"), + new PathDate(new DateTime(2011, 3, 15, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "valid/y=2011/m=03/d=15/H=20/Test3"), + new PathDate(new DateTime(2011, 3, 15, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "valid/y=2011/m=03/d=15/Test4"), new PathDate(null, null, "valid/y=2011/m=03/Test5"), new PathDate(null, null, "valid/y=2011/Test6"), new PathDate(null, null, "null/y=/m=/d=/Test7"), new PathDate(null, null, "null/m=10/y=2011/d=23/Test8"), new PathDate(null, null, "null/Test9"), new PathDate(null, null, ""), //Test10 Intentionally empty. - new PathDate(new DateTime(2011, 10, 20, 0, 0, 0, 0), null, "error/y=2011/m=10/d=20/H=20/M=42/S=72/Test11"), - new PathDate(new DateTime(2011, 10, 20, 0, 0, 0, 0), null, "error/y=2011/m=10/d=20/H=20/M=90/S=24/Test12"), - new PathDate(new DateTime(2011, 10, 20, 0, 0, 0, 0), null, "error/y=2011/m=10/d=20/H=42/M=42/S=24/Test13"), + new PathDate(new DateTime(2011, 10, 20, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "error/y=2011/m=10/d=20/H=20/M=42/S=72/Test11"), + new PathDate(new DateTime(2011, 10, 20, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "error/y=2011/m=10/d=20/H=20/M=90/S=24/Test12"), + new PathDate(new DateTime(2011, 10, 20, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "error/y=2011/m=10/d=20/H=42/M=42/S=24/Test13"), new PathDate(null, IllegalFieldValueException.class, "error/y=2011/m=10/d=33/H=20/M=42/S=24/Test14"), new PathDate(null, IllegalFieldValueException.class, "error/y=2011/m=13/d=20/H=20/M=42/S=24/Test15") }; @@ -247,22 +248,22 @@ public class GranularityTest public void testMonthToDate() { PathDate[] monthChecks = { - new PathDate(new DateTime(2011, 3, 1, 0, 0, 0, 0), null, "y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), - new PathDate(new DateTime(2011, 3, 1, 0, 0, 0, 0), null, "/y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), - new PathDate(new DateTime(2011, 3, 1, 0, 0, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/M=50/S=43/Test1"), - new PathDate(new DateTime(2011, 3, 1, 0, 0, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/M=50/Test2"), - new PathDate(new DateTime(2011, 3, 1, 0, 0, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/Test3"), - new PathDate(new DateTime(2011, 3, 1, 0, 0, 0, 0), null, "valid/y=2011/m=03/d=15/Test4"), - new PathDate(new DateTime(2011, 3, 1, 0, 0, 0, 0), null, "valid/y=2011/m=03/Test5"), + new PathDate(new DateTime(2011, 3, 1, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), + new PathDate(new DateTime(2011, 3, 1, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "/y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), + new PathDate(new DateTime(2011, 3, 1, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "valid/y=2011/m=03/d=15/H=20/M=50/S=43/Test1"), + new PathDate(new DateTime(2011, 3, 1, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "valid/y=2011/m=03/d=15/H=20/M=50/Test2"), + new PathDate(new DateTime(2011, 3, 1, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "valid/y=2011/m=03/d=15/H=20/Test3"), + new PathDate(new DateTime(2011, 3, 1, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "valid/y=2011/m=03/d=15/Test4"), + new PathDate(new DateTime(2011, 3, 1, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "valid/y=2011/m=03/Test5"), new PathDate(null, null, "valid/y=2011/Test6"), new PathDate(null, null, "null/y=/m=/d=/Test7"), new PathDate(null, null, "null/m=10/y=2011/d=23/Test8"), new PathDate(null, null, "null/Test9"), new PathDate(null, null, ""), //Test10 Intentionally empty. 
- new PathDate(new DateTime(2011, 10, 1, 0, 0, 0, 0), null, "error/y=2011/m=10/d=20/H=20/M=42/S=72/Test11"), - new PathDate(new DateTime(2011, 10, 1, 0, 0, 0, 0), null, "error/y=2011/m=10/d=20/H=20/M=90/S=24/Test12"), - new PathDate(new DateTime(2011, 10, 1, 0, 0, 0, 0), null, "error/y=2011/m=10/d=20/H=42/M=42/S=24/Test13"), - new PathDate(new DateTime(2011, 10, 1, 0, 0, 0, 0), null, "error/y=2011/m=10/d=33/H=20/M=42/S=24/Test14"), + new PathDate(new DateTime(2011, 10, 1, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "error/y=2011/m=10/d=20/H=20/M=42/S=72/Test11"), + new PathDate(new DateTime(2011, 10, 1, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "error/y=2011/m=10/d=20/H=20/M=90/S=24/Test12"), + new PathDate(new DateTime(2011, 10, 1, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "error/y=2011/m=10/d=20/H=42/M=42/S=24/Test13"), + new PathDate(new DateTime(2011, 10, 1, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "error/y=2011/m=10/d=33/H=20/M=42/S=24/Test14"), new PathDate(null, IllegalFieldValueException.class, "error/y=2011/m=13/d=20/H=20/M=42/S=24/Test15") }; @@ -273,23 +274,23 @@ public class GranularityTest public void testYearToDate() { PathDate[] yearChecks = { - new PathDate(new DateTime(2011, 1, 1, 0, 0, 0, 0), null, "y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), - new PathDate(new DateTime(2011, 1, 1, 0, 0, 0, 0), null, "/y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), - new PathDate(new DateTime(2011, 1, 1, 0, 0, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/M=50/S=43/Test1"), - new PathDate(new DateTime(2011, 1, 1, 0, 0, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/M=50/Test2"), - new PathDate(new DateTime(2011, 1, 1, 0, 0, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/Test3"), - new PathDate(new DateTime(2011, 1, 1, 0, 0, 0, 0), null, "valid/y=2011/m=03/d=15/Test4"), - new PathDate(new DateTime(2011, 1, 1, 0, 0, 0, 0), null, "valid/y=2011/m=03/Test5"), - new PathDate(new DateTime(2011, 1, 1, 0, 0, 0, 0), null, "valid/y=2011/Test6"), + new PathDate(new DateTime(2011, 1, 1, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), + new PathDate(new DateTime(2011, 1, 1, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "/y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), + new PathDate(new DateTime(2011, 1, 1, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "valid/y=2011/m=03/d=15/H=20/M=50/S=43/Test1"), + new PathDate(new DateTime(2011, 1, 1, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "valid/y=2011/m=03/d=15/H=20/M=50/Test2"), + new PathDate(new DateTime(2011, 1, 1, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "valid/y=2011/m=03/d=15/H=20/Test3"), + new PathDate(new DateTime(2011, 1, 1, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "valid/y=2011/m=03/d=15/Test4"), + new PathDate(new DateTime(2011, 1, 1, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "valid/y=2011/m=03/Test5"), + new PathDate(new DateTime(2011, 1, 1, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "valid/y=2011/Test6"), new PathDate(null, null, "null/y=/m=/d=/Test7"), - new PathDate(new DateTime(2011, 1, 1, 0, 0, 0, 0), null, "null/m=10/y=2011/d=23/Test8"), + new PathDate(new DateTime(2011, 1, 1, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "null/m=10/y=2011/d=23/Test8"), new PathDate(null, null, "null/Test9"), new PathDate(null, null, ""), //Test10 Intentionally empty. 
- new PathDate(new DateTime(2011, 1, 1, 0, 0, 0, 0), null, "error/y=2011/m=10/d=20/H=20/M=42/S=72/Test11"), - new PathDate(new DateTime(2011, 1, 1, 0, 0, 0, 0), null, "error/y=2011/m=10/d=20/H=20/M=90/S=24/Test12"), - new PathDate(new DateTime(2011, 1, 1, 0, 0, 0, 0), null, "error/y=2011/m=10/d=20/H=42/M=42/S=24/Test13"), - new PathDate(new DateTime(2011, 1, 1, 0, 0, 0, 0), null, "error/y=2011/m=10/d=33/H=20/M=42/S=24/Test14"), - new PathDate(new DateTime(2011, 1, 1, 0, 0, 0, 0), null, "error/y=2011/m=13/d=20/H=20/M=42/S=24/Test15") + new PathDate(new DateTime(2011, 1, 1, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "error/y=2011/m=10/d=20/H=20/M=42/S=72/Test11"), + new PathDate(new DateTime(2011, 1, 1, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "error/y=2011/m=10/d=20/H=20/M=90/S=24/Test12"), + new PathDate(new DateTime(2011, 1, 1, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "error/y=2011/m=10/d=20/H=42/M=42/S=24/Test13"), + new PathDate(new DateTime(2011, 1, 1, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "error/y=2011/m=10/d=33/H=20/M=42/S=24/Test14"), + new PathDate(new DateTime(2011, 1, 1, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "error/y=2011/m=13/d=20/H=20/M=42/S=24/Test15") }; checkToDate(YEAR, Granularity.Formatter.DEFAULT, yearChecks); } @@ -366,54 +367,54 @@ public class GranularityTest @Test public void testBucket() { - DateTime dt = new DateTime("2011-02-03T04:05:06.100"); + DateTime dt = DateTimes.of("2011-02-03T04:05:06.100"); - Assert.assertEquals(new Interval("2011-01-01/2012-01-01"), YEAR.bucket(dt)); - Assert.assertEquals(new Interval("2011-02-01/2011-03-01"), MONTH.bucket(dt)); - Assert.assertEquals(new Interval("2011-01-31/2011-02-07"), WEEK.bucket(dt)); - Assert.assertEquals(new Interval("2011-02-03/2011-02-04"), DAY.bucket(dt)); - Assert.assertEquals(new Interval("2011-02-03T04/2011-02-03T05"), HOUR.bucket(dt)); - Assert.assertEquals(new Interval("2011-02-03T04:05:00/2011-02-03T04:06:00"), MINUTE.bucket(dt)); - Assert.assertEquals(new Interval("2011-02-03T04:05:06/2011-02-03T04:05:07"), SECOND.bucket(dt)); + Assert.assertEquals(Intervals.of("2011-01-01/2012-01-01"), YEAR.bucket(dt)); + Assert.assertEquals(Intervals.of("2011-02-01/2011-03-01"), MONTH.bucket(dt)); + Assert.assertEquals(Intervals.of("2011-01-31/2011-02-07"), WEEK.bucket(dt)); + Assert.assertEquals(Intervals.of("2011-02-03/2011-02-04"), DAY.bucket(dt)); + Assert.assertEquals(Intervals.of("2011-02-03T04/2011-02-03T05"), HOUR.bucket(dt)); + Assert.assertEquals(Intervals.of("2011-02-03T04:05:00/2011-02-03T04:06:00"), MINUTE.bucket(dt)); + Assert.assertEquals(Intervals.of("2011-02-03T04:05:06/2011-02-03T04:05:07"), SECOND.bucket(dt)); // Test with aligned DateTime - Assert.assertEquals(new Interval("2011-01-01/2011-01-02"), DAY.bucket(new DateTime("2011-01-01"))); + Assert.assertEquals(Intervals.of("2011-01-01/2011-01-02"), DAY.bucket(DateTimes.of("2011-01-01"))); } @Test public void testTruncate() throws Exception { - DateTime date = new DateTime("2011-03-15T22:42:23.898"); - Assert.assertEquals(new DateTime("2011-01-01T00:00:00.000"), YEAR.bucketStart(date)); - Assert.assertEquals(new DateTime("2011-03-01T00:00:00.000"), MONTH.bucketStart(date)); - Assert.assertEquals(new DateTime("2011-03-14T00:00:00.000"), WEEK.bucketStart(date)); - Assert.assertEquals(new DateTime("2011-03-15T00:00:00.000"), DAY.bucketStart(date)); - Assert.assertEquals(new DateTime("2011-03-15T22:00:00.000"), HOUR.bucketStart(date)); - Assert.assertEquals(new DateTime("2011-03-15T22:42:00.000"), 
MINUTE.bucketStart(date)); - Assert.assertEquals(new DateTime("2011-03-15T22:42:23.000"), SECOND.bucketStart(date)); + DateTime date = DateTimes.of("2011-03-15T22:42:23.898"); + Assert.assertEquals(DateTimes.of("2011-01-01T00:00:00.000"), YEAR.bucketStart(date)); + Assert.assertEquals(DateTimes.of("2011-03-01T00:00:00.000"), MONTH.bucketStart(date)); + Assert.assertEquals(DateTimes.of("2011-03-14T00:00:00.000"), WEEK.bucketStart(date)); + Assert.assertEquals(DateTimes.of("2011-03-15T00:00:00.000"), DAY.bucketStart(date)); + Assert.assertEquals(DateTimes.of("2011-03-15T22:00:00.000"), HOUR.bucketStart(date)); + Assert.assertEquals(DateTimes.of("2011-03-15T22:42:00.000"), MINUTE.bucketStart(date)); + Assert.assertEquals(DateTimes.of("2011-03-15T22:42:23.000"), SECOND.bucketStart(date)); } @Test public void testGetIterable() throws Exception { - DateTime start = new DateTime("2011-01-01T00:00:00"); - DateTime end = new DateTime("2011-01-14T00:00:00"); + DateTime start = DateTimes.of("2011-01-01T00:00:00"); + DateTime end = DateTimes.of("2011-01-14T00:00:00"); Iterator intervals = DAY.getIterable(new Interval(start, end)).iterator(); - Assert.assertEquals(new Interval("2011-01-01/P1d"), intervals.next()); - Assert.assertEquals(new Interval("2011-01-02/P1d"), intervals.next()); - Assert.assertEquals(new Interval("2011-01-03/P1d"), intervals.next()); - Assert.assertEquals(new Interval("2011-01-04/P1d"), intervals.next()); - Assert.assertEquals(new Interval("2011-01-05/P1d"), intervals.next()); - Assert.assertEquals(new Interval("2011-01-06/P1d"), intervals.next()); - Assert.assertEquals(new Interval("2011-01-07/P1d"), intervals.next()); - Assert.assertEquals(new Interval("2011-01-08/P1d"), intervals.next()); - Assert.assertEquals(new Interval("2011-01-09/P1d"), intervals.next()); - Assert.assertEquals(new Interval("2011-01-10/P1d"), intervals.next()); - Assert.assertEquals(new Interval("2011-01-11/P1d"), intervals.next()); - Assert.assertEquals(new Interval("2011-01-12/P1d"), intervals.next()); - Assert.assertEquals(new Interval("2011-01-13/P1d"), intervals.next()); + Assert.assertEquals(Intervals.of("2011-01-01/P1d"), intervals.next()); + Assert.assertEquals(Intervals.of("2011-01-02/P1d"), intervals.next()); + Assert.assertEquals(Intervals.of("2011-01-03/P1d"), intervals.next()); + Assert.assertEquals(Intervals.of("2011-01-04/P1d"), intervals.next()); + Assert.assertEquals(Intervals.of("2011-01-05/P1d"), intervals.next()); + Assert.assertEquals(Intervals.of("2011-01-06/P1d"), intervals.next()); + Assert.assertEquals(Intervals.of("2011-01-07/P1d"), intervals.next()); + Assert.assertEquals(Intervals.of("2011-01-08/P1d"), intervals.next()); + Assert.assertEquals(Intervals.of("2011-01-09/P1d"), intervals.next()); + Assert.assertEquals(Intervals.of("2011-01-10/P1d"), intervals.next()); + Assert.assertEquals(Intervals.of("2011-01-11/P1d"), intervals.next()); + Assert.assertEquals(Intervals.of("2011-01-12/P1d"), intervals.next()); + Assert.assertEquals(Intervals.of("2011-01-13/P1d"), intervals.next()); try { intervals.next(); @@ -427,9 +428,9 @@ public class GranularityTest public void testCustomPeriodToDate() { PathDate[] customChecks = { - new PathDate(new DateTime(2011, 3, 15, 20, 50, 42, 0), null, "y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), - new PathDate(new DateTime(2011, 3, 15, 20, 50, 42, 0), null, "/y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), - new PathDate(new DateTime(2011, 3, 15, 20, 50, 42, 0), null, "valid/y=2011/m=03/d=15/H=20/M=50/S=43/Test1") + new PathDate(new DateTime(2011, 3, 15, 20, 
50, 42, 0, ISOChronology.getInstanceUTC()), null, "y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), + new PathDate(new DateTime(2011, 3, 15, 20, 50, 42, 0, ISOChronology.getInstanceUTC()), null, "/y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), + new PathDate(new DateTime(2011, 3, 15, 20, 50, 42, 0, ISOChronology.getInstanceUTC()), null, "valid/y=2011/m=03/d=15/H=20/M=50/S=43/Test1") }; checkToDate(new PeriodGranularity(new Period("PT2S"), null, DateTimeZone.UTC), Granularity.Formatter.DEFAULT, customChecks); } diff --git a/java-util/src/test/java/io/druid/java/util/common/guava/ComparatorsTest.java b/java-util/src/test/java/io/druid/java/util/common/guava/ComparatorsTest.java index e2714136ff9..c2346763ea8 100644 --- a/java-util/src/test/java/io/druid/java/util/common/guava/ComparatorsTest.java +++ b/java-util/src/test/java/io/druid/java/util/common/guava/ComparatorsTest.java @@ -19,6 +19,7 @@ package io.druid.java.util.common.guava; +import io.druid.java.util.common.Intervals; import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; @@ -63,32 +64,32 @@ public class ComparatorsTest { Comparator comp = Comparators.intervalsByStartThenEnd(); - Assert.assertEquals(0, comp.compare(new Interval("P1d/2011-04-02"), new Interval("2011-04-01/2011-04-02"))); - Assert.assertEquals(-1, comp.compare(new Interval("2011-03-31/2011-04-02"), new Interval("2011-04-01/2011-04-02"))); - Assert.assertEquals(1, comp.compare(new Interval("2011-04-01/2011-04-02"), new Interval("2011-03-31/2011-04-02"))); - Assert.assertEquals(1, comp.compare(new Interval("2011-04-01/2011-04-03"), new Interval("2011-04-01/2011-04-02"))); - Assert.assertEquals(-1, comp.compare(new Interval("2011-04-01/2011-04-03"), new Interval("2011-04-01/2011-04-04"))); + Assert.assertEquals(0, comp.compare(Intervals.of("P1d/2011-04-02"), Intervals.of("2011-04-01/2011-04-02"))); + Assert.assertEquals(-1, comp.compare(Intervals.of("2011-03-31/2011-04-02"), Intervals.of("2011-04-01/2011-04-02"))); + Assert.assertEquals(1, comp.compare(Intervals.of("2011-04-01/2011-04-02"), Intervals.of("2011-03-31/2011-04-02"))); + Assert.assertEquals(1, comp.compare(Intervals.of("2011-04-01/2011-04-03"), Intervals.of("2011-04-01/2011-04-02"))); + Assert.assertEquals(-1, comp.compare(Intervals.of("2011-04-01/2011-04-03"), Intervals.of("2011-04-01/2011-04-04"))); Interval[] intervals = new Interval[]{ - new Interval("2011-04-01T18/2011-04-02T13"), - new Interval("2011-04-01/2011-04-03"), - new Interval("2011-04-01/2011-04-04"), - new Interval("2011-04-02/2011-04-04"), - new Interval("2011-04-01/2011-04-02"), - new Interval("2011-04-02/2011-04-03"), - new Interval("2011-04-02/2011-04-03T06") + Intervals.of("2011-04-01T18/2011-04-02T13"), + Intervals.of("2011-04-01/2011-04-03"), + Intervals.of("2011-04-01/2011-04-04"), + Intervals.of("2011-04-02/2011-04-04"), + Intervals.of("2011-04-01/2011-04-02"), + Intervals.of("2011-04-02/2011-04-03"), + Intervals.of("2011-04-02/2011-04-03T06") }; Arrays.sort(intervals, comp); Assert.assertArrayEquals( new Interval[]{ - new Interval("2011-04-01/2011-04-02"), - new Interval("2011-04-01/2011-04-03"), - new Interval("2011-04-01/2011-04-04"), - new Interval("2011-04-01T18/2011-04-02T13"), - new Interval("2011-04-02/2011-04-03"), - new Interval("2011-04-02/2011-04-03T06"), - new Interval("2011-04-02/2011-04-04"), + Intervals.of("2011-04-01/2011-04-02"), + Intervals.of("2011-04-01/2011-04-03"), + Intervals.of("2011-04-01/2011-04-04"), + Intervals.of("2011-04-01T18/2011-04-02T13"), + Intervals.of("2011-04-02/2011-04-03"), + 
Intervals.of("2011-04-02/2011-04-03T06"), + Intervals.of("2011-04-02/2011-04-04"), }, intervals ); @@ -99,32 +100,32 @@ public class ComparatorsTest { Comparator comp = Comparators.intervalsByEndThenStart(); - Assert.assertEquals(0, comp.compare(new Interval("P1d/2011-04-02"), new Interval("2011-04-01/2011-04-02"))); - Assert.assertEquals(-1, comp.compare(new Interval("2011-04-01/2011-04-03"), new Interval("2011-04-01/2011-04-04"))); - Assert.assertEquals(1, comp.compare(new Interval("2011-04-01/2011-04-02"), new Interval("2011-04-01/2011-04-01"))); - Assert.assertEquals(-1, comp.compare(new Interval("2011-04-01/2011-04-03"), new Interval("2011-04-02/2011-04-03"))); - Assert.assertEquals(1, comp.compare(new Interval("2011-04-01/2011-04-03"), new Interval("2011-03-31/2011-04-03"))); + Assert.assertEquals(0, comp.compare(Intervals.of("P1d/2011-04-02"), Intervals.of("2011-04-01/2011-04-02"))); + Assert.assertEquals(-1, comp.compare(Intervals.of("2011-04-01/2011-04-03"), Intervals.of("2011-04-01/2011-04-04"))); + Assert.assertEquals(1, comp.compare(Intervals.of("2011-04-01/2011-04-02"), Intervals.of("2011-04-01/2011-04-01"))); + Assert.assertEquals(-1, comp.compare(Intervals.of("2011-04-01/2011-04-03"), Intervals.of("2011-04-02/2011-04-03"))); + Assert.assertEquals(1, comp.compare(Intervals.of("2011-04-01/2011-04-03"), Intervals.of("2011-03-31/2011-04-03"))); Interval[] intervals = new Interval[]{ - new Interval("2011-04-01T18/2011-04-02T13"), - new Interval("2011-04-01/2011-04-03"), - new Interval("2011-04-01/2011-04-04"), - new Interval("2011-04-02/2011-04-04"), - new Interval("2011-04-01/2011-04-02"), - new Interval("2011-04-02/2011-04-03"), - new Interval("2011-04-02/2011-04-03T06") + Intervals.of("2011-04-01T18/2011-04-02T13"), + Intervals.of("2011-04-01/2011-04-03"), + Intervals.of("2011-04-01/2011-04-04"), + Intervals.of("2011-04-02/2011-04-04"), + Intervals.of("2011-04-01/2011-04-02"), + Intervals.of("2011-04-02/2011-04-03"), + Intervals.of("2011-04-02/2011-04-03T06") }; Arrays.sort(intervals, comp); Assert.assertArrayEquals( new Interval[]{ - new Interval("2011-04-01/2011-04-02"), - new Interval("2011-04-01T18/2011-04-02T13"), - new Interval("2011-04-01/2011-04-03"), - new Interval("2011-04-02/2011-04-03"), - new Interval("2011-04-02/2011-04-03T06"), - new Interval("2011-04-01/2011-04-04"), - new Interval("2011-04-02/2011-04-04") + Intervals.of("2011-04-01/2011-04-02"), + Intervals.of("2011-04-01T18/2011-04-02T13"), + Intervals.of("2011-04-01/2011-04-03"), + Intervals.of("2011-04-02/2011-04-03"), + Intervals.of("2011-04-02/2011-04-03T06"), + Intervals.of("2011-04-01/2011-04-04"), + Intervals.of("2011-04-02/2011-04-04") }, intervals ); diff --git a/java-util/src/test/java/io/druid/java/util/common/parsers/TimestampParserTest.java b/java-util/src/test/java/io/druid/java/util/common/parsers/TimestampParserTest.java index 35ce86e1c9d..2c0ae1eea24 100644 --- a/java-util/src/test/java/io/druid/java/util/common/parsers/TimestampParserTest.java +++ b/java-util/src/test/java/io/druid/java/util/common/parsers/TimestampParserTest.java @@ -20,7 +20,9 @@ package io.druid.java.util.common.parsers; import com.google.common.base.Function; +import io.druid.java.util.common.DateTimes; import org.joda.time.DateTime; +import org.joda.time.DateTimeZone; import org.junit.Assert; import org.junit.Rule; import org.junit.Test; @@ -42,15 +44,15 @@ public class TimestampParserTest public void testAuto() throws Exception { final Function parser = TimestampParser.createObjectTimestampParser("auto"); - 
Assert.assertEquals(new DateTime("2009-02-13T23:31:30Z"), parser.apply("1234567890000")); - Assert.assertEquals(new DateTime("2009-02-13T23:31:30Z"), parser.apply("2009-02-13T23:31:30Z")); - Assert.assertEquals(new DateTime("2009-02-13T23:31:30-08:00"), parser.apply("2009-02-13T23:31:30-08:00")); - Assert.assertEquals(new DateTime("2009-02-13T23:31:30Z"), parser.apply("2009-02-13 23:31:30Z")); - Assert.assertEquals(new DateTime("2009-02-13T23:31:30-08:00"), parser.apply("2009-02-13 23:31:30-08:00")); - Assert.assertEquals(new DateTime("2009-02-13T00:00:00Z"), parser.apply("2009-02-13")); - Assert.assertEquals(new DateTime("2009-02-13T00:00:00Z"), parser.apply("\"2009-02-13\"")); - Assert.assertEquals(new DateTime("2009-02-13T23:31:30Z"), parser.apply("2009-02-13 23:31:30")); - Assert.assertEquals(new DateTime("2009-02-13T23:31:30Z"), parser.apply(1234567890000L)); + Assert.assertEquals(DateTimes.of("2009-02-13T23:31:30Z"), parser.apply("1234567890000")); + Assert.assertEquals(DateTimes.of("2009-02-13T23:31:30Z"), parser.apply("2009-02-13T23:31:30Z")); + Assert.assertEquals(DateTimes.of("2009-02-13T23:31:30-08:00"), parser.apply("2009-02-13T23:31:30-08:00")); + Assert.assertEquals(DateTimes.of("2009-02-13T23:31:30Z"), parser.apply("2009-02-13 23:31:30Z")); + Assert.assertEquals(DateTimes.of("2009-02-13T23:31:30-08:00"), parser.apply("2009-02-13 23:31:30-08:00")); + Assert.assertEquals(DateTimes.of("2009-02-13T00:00:00Z"), parser.apply("2009-02-13")); + Assert.assertEquals(DateTimes.of("2009-02-13T00:00:00Z"), parser.apply("\"2009-02-13\"")); + Assert.assertEquals(DateTimes.of("2009-02-13T23:31:30Z"), parser.apply("2009-02-13 23:31:30")); + Assert.assertEquals(DateTimes.of("2009-02-13T23:31:30Z"), parser.apply(1234567890000L)); } @Test @@ -75,116 +77,88 @@ public class TimestampParserTest public void testRuby() throws Exception { final Function parser = TimestampParser.createObjectTimestampParser("ruby"); - Assert.assertEquals(new DateTime("2013-01-16T15:41:47+01:00"), parser.apply("1358347307.435447")); - Assert.assertEquals(new DateTime("2013-01-16T15:41:47+01:00"), parser.apply(1358347307.435447D)); + Assert.assertEquals(DateTimes.of("2013-01-16T15:41:47+01:00"), parser.apply("1358347307.435447")); + Assert.assertEquals(DateTimes.of("2013-01-16T15:41:47+01:00"), parser.apply(1358347307.435447D)); } @Test public void testNano() throws Exception { String timeNsStr = "1427504794977098494"; - DateTime expectedDt = new DateTime("2015-3-28T01:06:34.977Z"); + DateTime expectedDt = DateTimes.of("2015-3-28T01:06:34.977Z"); final Function parser = TimestampParser.createObjectTimestampParser("nano"); Assert.assertEquals("Incorrect truncation of nanoseconds -> milliseconds", expectedDt, parser.apply(timeNsStr)); // Confirm sub-millisecond timestamps are handled correctly - expectedDt = new DateTime("1970-1-1T00:00:00.000Z"); + expectedDt = DateTimes.of("1970-1-1T00:00:00.000Z"); Assert.assertEquals(expectedDt, parser.apply("999999")); Assert.assertEquals(expectedDt, parser.apply("0")); Assert.assertEquals(expectedDt, parser.apply("0000")); Assert.assertEquals(expectedDt, parser.apply(999999L)); } - /*Commenting out until Joda 2.1 supported @Test - public void testTimeStampParserWithQuotes() throws Exception { + public void testTimeStampParserWithQuotes() throws Exception + { DateTime d = new DateTime(1994, 11, 9, 4, 0, DateTimeZone.forOffsetHours(-8)); - Function parser = ParserUtils.createTimestampParser("EEE MMM dd HH:mm:ss z yyyy"); + Function parser = TimestampParser.createTimestampParser("EEE 
MMM dd HH:mm:ss z yyyy"); Assert.assertEquals(d.getMillis(), parser.apply(" \" Wed Nov 9 04:00:00 PST 1994 \" ").getMillis()); } @Test - public void testTimeStampParserWithShortTimeZone() throws Exception { + public void testTimeStampParserWithShortTimeZone() throws Exception + { DateTime d = new DateTime(1994, 11, 9, 4, 0, DateTimeZone.forOffsetHours(-8)); - Function parser = ParserUtils.createTimestampParser("EEE MMM dd HH:mm:ss z yyyy"); + Function parser = TimestampParser.createTimestampParser("EEE MMM dd HH:mm:ss z yyyy"); Assert.assertEquals(d.getMillis(), parser.apply("Wed Nov 9 04:00:00 PST 1994").getMillis()); } @Test - public void testTimeStampParserWithLongTimeZone() throws Exception { + public void testTimeStampParserWithLongTimeZone() throws Exception + { long millis1 = new DateTime(1994, 11, 9, 4, 0, DateTimeZone.forOffsetHours(-8)).getMillis(); long millis2 = new DateTime(1994, 11, 9, 4, 0, DateTimeZone.forOffsetHours(-6)).getMillis(); - Function parser = ParserUtils.createTimestampParser("EEE MMM dd HH:mm:ss zZ z yyyy"); + Function parser = TimestampParser.createTimestampParser("EEE MMM dd HH:mm:ss zZ z yyyy"); Assert.assertEquals(millis1, parser.apply("Wed Nov 9 04:00:00 GMT-0800 PST 1994").getMillis()); Assert.assertEquals(millis2, parser.apply("Wed Nov 9 04:00:00 GMT-0600 CST 1994").getMillis()); Assert.assertEquals(millis1, parser.apply("Wed Nov 9 04:00:00 UTC-0800 PST 1994").getMillis()); Assert.assertEquals(millis2, parser.apply("Wed Nov 9 04:00:00 UTC-0600 CST 1994").getMillis()); - parser = ParserUtils.createTimestampParser("EEE MMM dd HH:mm:ss zZ yyyy"); + parser = TimestampParser.createTimestampParser("EEE MMM dd HH:mm:ss zZ yyyy"); Assert.assertEquals(millis1, parser.apply("Wed Nov 9 04:00:00 GMT-0800 1994").getMillis()); Assert.assertEquals(millis2, parser.apply("Wed Nov 9 04:00:00 GMT-0600 1994").getMillis()); Assert.assertEquals(millis1, parser.apply("Wed Nov 9 04:00:00 UTC-0800 1994").getMillis()); Assert.assertEquals(millis2, parser.apply("Wed Nov 9 04:00:00 UTC-0600 1994").getMillis()); - - parser = ParserUtils.createTimestampParser("EEE MMM dd HH:mm:ss zZ Q yyyy"); - Assert.assertEquals(millis1, parser.apply("Wed Nov 9 04:00:00 GMT-0800 (PST) 1994").getMillis()); - Assert.assertEquals(millis2, parser.apply("Wed Nov 9 04:00:00 GMT-0600 (CST) 1994").getMillis()); - Assert.assertEquals(millis1, parser.apply("Wed Nov 9 04:00:00 UTC-0800 (PST) 1994").getMillis()); - Assert.assertEquals(millis2, parser.apply("Wed Nov 9 04:00:00 UTC-0600 (CST) 1994").getMillis()); - } @Test - public void testTimeZoneAtExtremeLocations() throws Exception { - Function parser = ParserUtils.createTimestampParser("EEE MMM dd yy HH:mm:ss zZ z"); + public void testTimeZoneAtExtremeLocations() throws Exception + { + Function parser = TimestampParser.createTimestampParser("EEE MMM dd yy HH:mm:ss zZ z"); Assert.assertEquals(new DateTime(2005, 1, 22, 13, 0, DateTimeZone.forOffsetHours(-6)).getMillis(), parser.apply("Sat Jan 22 05 13:00:00 GMT-0600 CST").getMillis()); - parser = ParserUtils.createTimestampParser("zZ z EEE MMM dd yy HH:mm:ss"); + parser = TimestampParser.createTimestampParser("zZ z EEE MMM dd yy HH:mm:ss"); Assert.assertEquals(new DateTime(2005, 1, 22, 13, 0, DateTimeZone.forOffsetHours(-6)).getMillis(), parser.apply("GMT-0600 CST Sat Jan 22 05 13:00:00").getMillis()); } - */ - - /** - * This test case checks a potentially fragile behavior - * Some timestamps will come to us in the form of GMT-OFFSET (Time Zone Abbreviation) - * The number of time zone abbreviations is long 
and what they mean can change - * If the offset is explicitly provided via GMT-OFFSET, we want Joda to use this instead - * of the time zone abbreviation - * @throws Exception - */ - /*@Test - public void testOffsetPriority() throws Exception { - long millis1 = new DateTime(1994, 11, 9, 4, 0, DateTimeZone.forOffsetHours(-8)).getMillis(); - long millis2 = new DateTime(1994, 11, 9, 4, 0, DateTimeZone.forOffsetHours(-6)).getMillis(); - - Function parser = ParserUtils.createTimestampParser("EEE MMM dd HH:mm:ss zZ Q yyyy"); - - //Test timestamps that have an incorrect time zone abbreviation for the GMT offset. - //Joda should use the offset and not use the time zone abbreviation - Assert.assertEquals(millis1, parser.apply("Wed Nov 9 04:00:00 GMT-0800 (ADT) 1994").getMillis()); - Assert.assertEquals(millis2, parser.apply("Wed Nov 9 04:00:00 GMT-0600 (MDT) 1994").getMillis()); - } @Test - public void testJodaSymbolInsideLiteral() throws Exception { + public void testJodaSymbolInsideLiteral() throws Exception + { DateTime d = new DateTime(1994, 11, 9, 4, 0, DateTimeZone.forOffsetHours(-8)); Assert.assertEquals(d.getMillis(), - ParserUtils.createTimestampParser("EEE MMM dd HH:mm:ss z yyyy 'helloz'") + TimestampParser.createTimestampParser("EEE MMM dd HH:mm:ss z yyyy 'helloz'") .apply("Wed Nov 9 04:00:00 PST 1994 helloz") .getMillis() ); Assert.assertEquals(d.getMillis(), - ParserUtils.createTimestampParser("EEE MMM dd HH:mm:ss 'helloz' z yyyy 'hello'") + TimestampParser.createTimestampParser("EEE MMM dd HH:mm:ss 'helloz' z yyyy 'hello'") .apply("Wed Nov 9 04:00:00 helloz PST 1994 hello") .getMillis() ); - }*/ - - - + } } diff --git a/pom.xml b/pom.xml index ea830e23aa0..2b9231b9ae4 100644 --- a/pom.xml +++ b/pom.xml @@ -58,6 +58,8 @@ + 1.8 + 2.11.0 1.9.0 @@ -908,6 +910,46 @@ + + de.thetaphi + forbiddenapis + 2.3 + + + + jdk-unsafe + + + ${session.executionRootDirectory}/codestyle/joda-time-forbidden-apis.txt + + + + + validate + validate + + check + + + + + jdk-unsafe + jdk-system-out + + + + + testValidate + validate + + testCheck + + + + org.codehaus.mojo animal-sniffer-maven-plugin @@ -1094,8 +1136,8 @@ org.apache.maven.plugins maven-compiler-plugin - 1.8 - 1.8 + ${maven.compiler.target} + ${maven.compiler.target} @@ -1116,8 +1158,8 @@ true 1024m 3000m - 1.8 - 1.8 + ${maven.compiler.target} + ${maven.compiler.target} false -XepDisableWarningsInGeneratedCode diff --git a/processing/src/main/java/io/druid/jackson/JodaStuff.java b/processing/src/main/java/io/druid/jackson/JodaStuff.java index 380e9ef1a82..43e569ab9e7 100644 --- a/processing/src/main/java/io/druid/jackson/JodaStuff.java +++ b/processing/src/main/java/io/druid/jackson/JodaStuff.java @@ -30,6 +30,8 @@ import com.fasterxml.jackson.databind.module.SimpleModule; import com.fasterxml.jackson.databind.ser.std.ToStringSerializer; import com.fasterxml.jackson.datatype.joda.deser.DurationDeserializer; import com.fasterxml.jackson.datatype.joda.deser.PeriodDeserializer; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import org.joda.time.DateTime; import org.joda.time.Duration; import org.joda.time.Interval; @@ -72,7 +74,7 @@ class JodaStuff public Interval deserialize(JsonParser jsonParser, DeserializationContext deserializationContext) throws IOException, JsonProcessingException { - return new Interval(jsonParser.getText()); + return Intervals.of(jsonParser.getText()); } } @@ -81,7 +83,7 @@ class JodaStuff @Override public Object deserializeKey(String key, DeserializationContext ctxt) throws 
IOException, JsonProcessingException { - return new DateTime(key); + return DateTimes.of(key); } } @@ -98,7 +100,7 @@ class JodaStuff { JsonToken t = jp.getCurrentToken(); if (t == JsonToken.VALUE_NUMBER_INT) { - return new DateTime(jp.getLongValue()); + return DateTimes.utc(jp.getLongValue()); } if (t == JsonToken.VALUE_STRING) { String str = jp.getText().trim(); diff --git a/processing/src/main/java/io/druid/query/Druids.java b/processing/src/main/java/io/druid/query/Druids.java index 0c942a7a3cc..1d074d4efb9 100644 --- a/processing/src/main/java/io/druid/query/Druids.java +++ b/processing/src/main/java/io/druid/query/Druids.java @@ -24,6 +24,7 @@ import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.granularity.Granularity; import io.druid.query.aggregation.AggregatorFactory; @@ -894,7 +895,7 @@ public class Druids public ResultBuilder() { - timestamp = new DateTime(0); + timestamp = DateTimes.EPOCH; value = null; } diff --git a/processing/src/main/java/io/druid/query/IntervalChunkingQueryRunner.java b/processing/src/main/java/io/druid/query/IntervalChunkingQueryRunner.java index f4d3467fb31..26422510540 100644 --- a/processing/src/main/java/io/druid/query/IntervalChunkingQueryRunner.java +++ b/processing/src/main/java/io/druid/query/IntervalChunkingQueryRunner.java @@ -22,6 +22,7 @@ package io.druid.query; import com.google.common.base.Function; import com.google.common.collect.Lists; import com.metamx.emitter.service.ServiceEmitter; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.granularity.PeriodGranularity; import io.druid.java.util.common.guava.FunctionalIterable; import io.druid.java.util.common.guava.Sequence; @@ -41,8 +42,6 @@ import java.util.concurrent.ExecutorService; */ public class IntervalChunkingQueryRunner implements QueryRunner { - private static final DateTime EPOCH = new DateTime(0L); - private final QueryRunner baseRunner; private final QueryToolChest> toolChest; @@ -68,7 +67,7 @@ public class IntervalChunkingQueryRunner implements QueryRunner final Period chunkPeriod = getChunkPeriod(queryPlus.getQuery()); // Check for non-empty chunkPeriod, avoiding toStandardDuration since that cannot handle periods like P1M. 
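// Editor's aside — an illustrative sketch, not part of this patch or of Druid's sources.
// The hunk below swaps the class-local EPOCH constant for DateTimes.EPOCH while keeping the
// "add the period to the epoch" trick, because Period.toStandardDuration() throws
// UnsupportedOperationException for variable-length periods such as P1M. The helper name
// isEmptyChunkPeriod is hypothetical; DateTimes.EPOCH is assumed to be the UTC epoch constant
// introduced elsewhere in this patch.
final class ChunkPeriodCheckSketch
{
  static boolean isEmptyChunkPeriod(org.joda.time.Period chunkPeriod)
  {
    // An empty period added to the epoch leaves the instant unchanged; any real period moves it.
    return DateTimes.EPOCH.plus(chunkPeriod).getMillis() == DateTimes.EPOCH.getMillis();
  }
  // e.g. isEmptyChunkPeriod(org.joda.time.Period.ZERO) == true,
  //      isEmptyChunkPeriod(new org.joda.time.Period("P1M")) == false
}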
- if (EPOCH.plus(chunkPeriod).getMillis() == EPOCH.getMillis()) { + if (DateTimes.EPOCH.plus(chunkPeriod).getMillis() == DateTimes.EPOCH.getMillis()) { return baseRunner.run(queryPlus, responseContext); } @@ -124,7 +123,7 @@ public class IntervalChunkingQueryRunner implements QueryRunner ); } - private Iterable splitInterval(Interval interval, Period period) + private static Iterable splitInterval(Interval interval, Period period) { if (interval.getEndMillis() == interval.getStartMillis()) { return Lists.newArrayList(interval); @@ -133,15 +132,15 @@ public class IntervalChunkingQueryRunner implements QueryRunner List intervals = Lists.newArrayList(); Iterator timestamps = new PeriodGranularity(period, null, null).getIterable(interval).iterator(); - long start = Math.max(timestamps.next().getStartMillis(), interval.getStartMillis()); + DateTime start = DateTimes.max(timestamps.next().getStart(), interval.getStart()); while (timestamps.hasNext()) { - long end = timestamps.next().getStartMillis(); + DateTime end = timestamps.next().getStart(); intervals.add(new Interval(start, end)); start = end; } - if (start < interval.getEndMillis()) { - intervals.add(new Interval(start, interval.getEndMillis())); + if (start.compareTo(interval.getEnd()) < 0) { + intervals.add(new Interval(start, interval.getEnd())); } return intervals; diff --git a/processing/src/main/java/io/druid/query/TimewarpOperator.java b/processing/src/main/java/io/druid/query/TimewarpOperator.java index 5f45a76192b..64ed0cdd9af 100644 --- a/processing/src/main/java/io/druid/query/TimewarpOperator.java +++ b/processing/src/main/java/io/druid/query/TimewarpOperator.java @@ -23,6 +23,7 @@ import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Function; import io.druid.data.input.MapBasedRow; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; import io.druid.query.spec.MultipleIntervalSegmentSpec; @@ -69,7 +70,7 @@ public class TimewarpOperator implements PostProcessingOperator @Override public QueryRunner postProcess(QueryRunner baseQueryRunner) { - return postProcess(baseQueryRunner, DateTime.now().getMillis()); + return postProcess(baseQueryRunner, DateTimes.nowUtc().getMillis()); } public QueryRunner postProcess(final QueryRunner baseRunner, final long now) @@ -84,7 +85,8 @@ public class TimewarpOperator implements PostProcessingOperator final Interval interval = queryPlus.getQuery().getIntervals().get(0); final Interval modifiedInterval = new Interval( Math.min(interval.getStartMillis() + offset, now + offset), - Math.min(interval.getEndMillis() + offset, now + offset) + Math.min(interval.getEndMillis() + offset, now + offset), + interval.getChronology() ); return Sequences.map( baseRunner.run( @@ -113,9 +115,9 @@ public class TimewarpOperator implements PostProcessingOperator final DateTime maxTime = boundary.getMaxTime(); return (T) ((TimeBoundaryQuery) queryPlus.getQuery()).buildResult( - new DateTime(Math.min(res.getTimestamp().getMillis() - offset, now)), + DateTimes.utc(Math.min(res.getTimestamp().getMillis() - offset, now)), minTime != null ? minTime.minus(offset) : null, - maxTime != null ? new DateTime(Math.min(maxTime.getMillis() - offset, now)) : null + maxTime != null ? 
DateTimes.utc(Math.min(maxTime.getMillis() - offset, now)) : null ).iterator().next(); } return (T) new Result(res.getTimestamp().minus(offset), value); diff --git a/processing/src/main/java/io/druid/query/datasourcemetadata/DataSourceMetadataQuery.java b/processing/src/main/java/io/druid/query/datasourcemetadata/DataSourceMetadataQuery.java index d750c97cc55..06a5b259607 100644 --- a/processing/src/main/java/io/druid/query/datasourcemetadata/DataSourceMetadataQuery.java +++ b/processing/src/main/java/io/druid/query/datasourcemetadata/DataSourceMetadataQuery.java @@ -22,7 +22,8 @@ package io.druid.query.datasourcemetadata; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.collect.Lists; -import io.druid.common.utils.JodaUtils; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.query.BaseQuery; import io.druid.query.DataSource; import io.druid.query.Druids; @@ -32,7 +33,6 @@ import io.druid.query.filter.DimFilter; import io.druid.query.spec.MultipleIntervalSegmentSpec; import io.druid.query.spec.QuerySegmentSpec; import org.joda.time.DateTime; -import org.joda.time.Interval; import java.util.Collections; import java.util.List; @@ -42,9 +42,7 @@ import java.util.Map; */ public class DataSourceMetadataQuery extends BaseQuery> { - public static final Interval MY_Y2K_INTERVAL = new Interval( - JodaUtils.MIN_INSTANT, JodaUtils.MAX_INSTANT - ); + private static final QuerySegmentSpec DEFAULT_SEGMENT_SPEC = new MultipleIntervalSegmentSpec(Intervals.ONLY_ETERNITY); @JsonCreator public DataSourceMetadataQuery( @@ -53,13 +51,7 @@ public class DataSourceMetadataQuery extends BaseQuery context ) { - super( - dataSource, - (querySegmentSpec == null) ? new MultipleIntervalSegmentSpec(Collections.singletonList(MY_Y2K_INTERVAL)) - : querySegmentSpec, - false, - context - ); + super(dataSource, querySegmentSpec == null ? DEFAULT_SEGMENT_SPEC : querySegmentSpec, false, context); } @Override @@ -112,7 +104,7 @@ public class DataSourceMetadataQuery extends BaseQuery result : results) { DateTime currMaxIngestedEventTime = result.getValue().getMaxIngestedEventTime(); if (currMaxIngestedEventTime != null && currMaxIngestedEventTime.isAfter(max)) { diff --git a/processing/src/main/java/io/druid/query/expression/ExprUtils.java b/processing/src/main/java/io/druid/query/expression/ExprUtils.java index c215de9db41..74d9218b258 100644 --- a/processing/src/main/java/io/druid/query/expression/ExprUtils.java +++ b/processing/src/main/java/io/druid/query/expression/ExprUtils.java @@ -22,9 +22,11 @@ package io.druid.query.expression; import io.druid.java.util.common.IAE; import io.druid.java.util.common.granularity.PeriodGranularity; import io.druid.math.expr.Expr; +import org.joda.time.Chronology; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import org.joda.time.Period; +import org.joda.time.chrono.ISOChronology; public class ExprUtils { @@ -56,13 +58,6 @@ public class ExprUtils final DateTime origin; final DateTimeZone timeZone; - if (originArg == null) { - origin = null; - } else { - final Object value = originArg.eval(bindings).value(); - origin = value != null ? new DateTime(value) : null; - } - if (timeZoneArg == null) { timeZone = null; } else { @@ -70,6 +65,14 @@ public class ExprUtils timeZone = value != null ? DateTimeZone.forID(value) : null; } + if (originArg == null) { + origin = null; + } else { + Chronology chronology = timeZone == null ? 
ISOChronology.getInstanceUTC() : ISOChronology.getInstance(timeZone); + final Object value = originArg.eval(bindings).value(); + origin = value != null ? new DateTime(value, chronology) : null; + } + return new PeriodGranularity(period, origin, timeZone); } } diff --git a/processing/src/main/java/io/druid/query/expression/TimestampCeilExprMacro.java b/processing/src/main/java/io/druid/query/expression/TimestampCeilExprMacro.java index 301a11bf3d0..48ce083e730 100644 --- a/processing/src/main/java/io/druid/query/expression/TimestampCeilExprMacro.java +++ b/processing/src/main/java/io/druid/query/expression/TimestampCeilExprMacro.java @@ -19,13 +19,13 @@ package io.druid.query.expression; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.IAE; import io.druid.java.util.common.granularity.Granularity; import io.druid.java.util.common.granularity.PeriodGranularity; import io.druid.math.expr.Expr; import io.druid.math.expr.ExprEval; import io.druid.math.expr.ExprMacroTable; -import org.joda.time.DateTime; import javax.annotation.Nonnull; import java.util.List; @@ -67,7 +67,7 @@ public class TimestampCeilExprMacro implements ExprMacroTable.ExprMacro @Override public ExprEval eval(final ObjectBinding bindings) { - return ExprEval.of(granularity.bucketEnd(new DateTime(arg.eval(bindings).asLong())).getMillis()); + return ExprEval.of(granularity.bucketEnd(DateTimes.utc(arg.eval(bindings).asLong())).getMillis()); } @Override @@ -102,7 +102,7 @@ public class TimestampCeilExprMacro implements ExprMacroTable.ExprMacro public ExprEval eval(final ObjectBinding bindings) { final PeriodGranularity granularity = getGranularity(args, bindings); - return ExprEval.of(granularity.bucketEnd(new DateTime(args.get(0).eval(bindings).asLong())).getMillis()); + return ExprEval.of(granularity.bucketEnd(DateTimes.utc(args.get(0).eval(bindings).asLong())).getMillis()); } @Override diff --git a/processing/src/main/java/io/druid/query/expression/TimestampExtractExprMacro.java b/processing/src/main/java/io/druid/query/expression/TimestampExtractExprMacro.java index 261e9caf879..19c64415e5f 100644 --- a/processing/src/main/java/io/druid/query/expression/TimestampExtractExprMacro.java +++ b/processing/src/main/java/io/druid/query/expression/TimestampExtractExprMacro.java @@ -21,6 +21,7 @@ package io.druid.query.expression; import io.druid.java.util.common.IAE; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.StringUtils; import io.druid.math.expr.Expr; import io.druid.math.expr.ExprEval; import io.druid.math.expr.ExprMacroTable; @@ -70,7 +71,7 @@ public class TimestampExtractExprMacro implements ExprMacroTable.ExprMacro } final Expr arg = args.get(0); - final Unit unit = Unit.valueOf(((String) args.get(1).getLiteralValue()).toUpperCase()); + final Unit unit = Unit.valueOf(StringUtils.toUpperCase((String) args.get(1).getLiteralValue())); final DateTimeZone timeZone; if (args.size() > 2) { @@ -87,7 +88,7 @@ public class TimestampExtractExprMacro implements ExprMacroTable.ExprMacro @Override public ExprEval eval(final ObjectBinding bindings) { - final DateTime dateTime = new DateTime(arg.eval(bindings).asLong()).withChronology(chronology); + final DateTime dateTime = new DateTime(arg.eval(bindings).asLong(), chronology); switch (unit) { case EPOCH: return ExprEval.of(dateTime.getMillis()); diff --git a/processing/src/main/java/io/druid/query/expression/TimestampFloorExprMacro.java b/processing/src/main/java/io/druid/query/expression/TimestampFloorExprMacro.java index 
cf660ba346a..fd81e51fc94 100644 --- a/processing/src/main/java/io/druid/query/expression/TimestampFloorExprMacro.java +++ b/processing/src/main/java/io/druid/query/expression/TimestampFloorExprMacro.java @@ -19,13 +19,13 @@ package io.druid.query.expression; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.IAE; import io.druid.java.util.common.granularity.Granularity; import io.druid.java.util.common.granularity.PeriodGranularity; import io.druid.math.expr.Expr; import io.druid.math.expr.ExprEval; import io.druid.math.expr.ExprMacroTable; -import org.joda.time.DateTime; import javax.annotation.Nonnull; import java.util.List; @@ -77,7 +77,7 @@ public class TimestampFloorExprMacro implements ExprMacroTable.ExprMacro @Override public ExprEval eval(final ObjectBinding bindings) { - return ExprEval.of(granularity.bucketStart(new DateTime(arg.eval(bindings).asLong())).getMillis()); + return ExprEval.of(granularity.bucketStart(DateTimes.utc(arg.eval(bindings).asLong())).getMillis()); } @Override @@ -102,7 +102,7 @@ public class TimestampFloorExprMacro implements ExprMacroTable.ExprMacro public ExprEval eval(final ObjectBinding bindings) { final PeriodGranularity granularity = getGranularity(args, bindings); - return ExprEval.of(granularity.bucketStart(new DateTime(args.get(0).eval(bindings).asLong())).getMillis()); + return ExprEval.of(granularity.bucketStart(DateTimes.utc(args.get(0).eval(bindings).asLong())).getMillis()); } @Override diff --git a/processing/src/main/java/io/druid/query/extraction/TimeFormatExtractionFn.java b/processing/src/main/java/io/druid/query/extraction/TimeFormatExtractionFn.java index fa66b5c2d75..3a2153b1966 100644 --- a/processing/src/main/java/io/druid/query/extraction/TimeFormatExtractionFn.java +++ b/processing/src/main/java/io/druid/query/extraction/TimeFormatExtractionFn.java @@ -22,11 +22,13 @@ package io.druid.query.extraction; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Preconditions; import io.druid.common.guava.GuavaUtils; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.granularity.Granularity; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; +import org.joda.time.chrono.ISOChronology; import org.joda.time.format.DateTimeFormat; import org.joda.time.format.DateTimeFormatter; import org.joda.time.format.ISODateTimeFormat; @@ -124,7 +126,7 @@ public class TimeFormatExtractionFn implements ExtractionFn @Override public String apply(long value) { - final long truncated = granularity.bucketStart(new DateTime(value)).getMillis(); + final long truncated = granularity.bucketStart(DateTimes.utc(value)).getMillis(); return formatter == null ? String.valueOf(truncated) : formatter.print(truncated); } @@ -138,9 +140,9 @@ public class TimeFormatExtractionFn implements ExtractionFn if (asMillis && value instanceof String) { final Long theLong = GuavaUtils.tryParseLong((String) value); - return theLong == null ? apply(new DateTime(value).getMillis()) : apply(theLong.longValue()); + return theLong == null ? 
apply(DateTimes.of((String) value).getMillis()) : apply(theLong.longValue()); } else { - return apply(new DateTime(value).getMillis()); + return apply(new DateTime(value, ISOChronology.getInstanceUTC()).getMillis()); } } diff --git a/processing/src/main/java/io/druid/query/filter/IntervalDimFilter.java b/processing/src/main/java/io/druid/query/filter/IntervalDimFilter.java index 52e06be1a09..830619a6268 100644 --- a/processing/src/main/java/io/druid/query/filter/IntervalDimFilter.java +++ b/processing/src/main/java/io/druid/query/filter/IntervalDimFilter.java @@ -24,7 +24,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Preconditions; import com.google.common.collect.RangeSet; import com.google.common.primitives.Longs; -import io.druid.common.utils.JodaUtils; +import io.druid.java.util.common.JodaUtils; import io.druid.java.util.common.Pair; import io.druid.java.util.common.StringUtils; import io.druid.query.extraction.ExtractionFn; diff --git a/processing/src/main/java/io/druid/query/groupby/GroupByQueryHelper.java b/processing/src/main/java/io/druid/query/groupby/GroupByQueryHelper.java index 32372893165..1d2fb7b7151 100644 --- a/processing/src/main/java/io/druid/query/groupby/GroupByQueryHelper.java +++ b/processing/src/main/java/io/druid/query/groupby/GroupByQueryHelper.java @@ -66,11 +66,11 @@ public class GroupByQueryHelper { final GroupByQueryConfig querySpecificConfig = config.withOverrides(query); final Granularity gran = query.getGranularity(); - final long timeStart = query.getIntervals().get(0).getStartMillis(); + final DateTime timeStart = query.getIntervals().get(0).getStart(); - long granTimeStart = timeStart; + DateTime granTimeStart = timeStart; if (!(Granularities.ALL.equals(gran))) { - granTimeStart = gran.bucketStart(new DateTime(timeStart)).getMillis(); + granTimeStart = gran.bucketStart(timeStart); } final List aggs; @@ -115,7 +115,7 @@ public class GroupByQueryHelper .withDimensionsSpec(new DimensionsSpec(dimensionSchemas, null, null)) .withMetrics(aggs.toArray(new AggregatorFactory[aggs.size()])) .withQueryGranularity(gran) - .withMinTimestamp(granTimeStart) + .withMinTimestamp(granTimeStart.getMillis()) .build(); if (query.getContextValue("useOffheap", false)) { diff --git a/processing/src/main/java/io/druid/query/groupby/epinephelinae/GroupByQueryEngineV2.java b/processing/src/main/java/io/druid/query/groupby/epinephelinae/GroupByQueryEngineV2.java index 30f598684a6..8465340ccc8 100644 --- a/processing/src/main/java/io/druid/query/groupby/epinephelinae/GroupByQueryEngineV2.java +++ b/processing/src/main/java/io/druid/query/groupby/epinephelinae/GroupByQueryEngineV2.java @@ -27,6 +27,7 @@ import io.druid.collections.NonBlockingPool; import io.druid.collections.ResourceHolder; import io.druid.data.input.MapBasedRow; import io.druid.data.input.Row; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.IAE; import io.druid.java.util.common.ISE; import io.druid.java.util.common.guava.BaseSequence; @@ -128,7 +129,7 @@ public class GroupByQueryEngineV2 final DateTime fudgeTimestamp = fudgeTimestampString == null ? 
null - : new DateTime(Long.parseLong(fudgeTimestampString)); + : DateTimes.utc(Long.parseLong(fudgeTimestampString)); return cursors.flatMap( cursor -> new BaseSequence<>( diff --git a/processing/src/main/java/io/druid/query/groupby/strategy/GroupByStrategyV2.java b/processing/src/main/java/io/druid/query/groupby/strategy/GroupByStrategyV2.java index cabbaf0ea32..4a26ecf22b2 100644 --- a/processing/src/main/java/io/druid/query/groupby/strategy/GroupByStrategyV2.java +++ b/processing/src/main/java/io/druid/query/groupby/strategy/GroupByStrategyV2.java @@ -35,6 +35,7 @@ import io.druid.data.input.Row; import io.druid.guice.annotations.Global; import io.druid.guice.annotations.Merging; import io.druid.guice.annotations.Smile; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.granularity.Granularity; import io.druid.java.util.common.guava.Sequence; @@ -117,10 +118,10 @@ public class GroupByStrategyV2 implements GroupByStrategy final String timestampStringFromContext = query.getContextValue(CTX_KEY_FUDGE_TIMESTAMP, ""); if (!timestampStringFromContext.isEmpty()) { - return new DateTime(Long.parseLong(timestampStringFromContext)); + return DateTimes.utc(Long.parseLong(timestampStringFromContext)); } else if (Granularities.ALL.equals(gran)) { - final long timeStart = query.getIntervals().get(0).getStartMillis(); - return gran.getIterable(new Interval(timeStart, timeStart + 1)).iterator().next().getStart(); + final DateTime timeStart = query.getIntervals().get(0).getStart(); + return gran.getIterable(new Interval(timeStart, timeStart.plus(1))).iterator().next().getStart(); } else { return null; } diff --git a/processing/src/main/java/io/druid/query/metadata/SegmentAnalyzer.java b/processing/src/main/java/io/druid/query/metadata/SegmentAnalyzer.java index addccea550a..60719648a95 100644 --- a/processing/src/main/java/io/druid/query/metadata/SegmentAnalyzer.java +++ b/processing/src/main/java/io/druid/query/metadata/SegmentAnalyzer.java @@ -50,6 +50,7 @@ import io.druid.segment.column.ValueType; import io.druid.segment.data.IndexedInts; import io.druid.segment.serde.ComplexMetricSerde; import io.druid.segment.serde.ComplexMetrics; +import org.joda.time.DateTime; import org.joda.time.Interval; import javax.annotation.Nullable; @@ -249,8 +250,8 @@ public class SegmentAnalyzer } if (analyzingSize()) { - final long start = storageAdapter.getMinTime().getMillis(); - final long end = storageAdapter.getMaxTime().getMillis(); + final DateTime start = storageAdapter.getMinTime(); + final DateTime end = storageAdapter.getMaxTime(); final Sequence cursors = storageAdapter.makeCursors( diff --git a/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java b/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java index fcb80d77c6a..4a921480f30 100644 --- a/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java +++ b/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java @@ -32,7 +32,7 @@ import com.google.common.collect.Ordering; import com.google.common.collect.Sets; import com.google.inject.Inject; import io.druid.common.guava.CombiningSequence; -import io.druid.common.utils.JodaUtils; +import io.druid.java.util.common.JodaUtils; import io.druid.data.input.impl.TimestampSpec; import io.druid.java.util.common.granularity.Granularity; import io.druid.java.util.common.guava.Comparators; diff --git 
a/processing/src/main/java/io/druid/query/metadata/metadata/SegmentMetadataQuery.java b/processing/src/main/java/io/druid/query/metadata/metadata/SegmentMetadataQuery.java index 9f0c1d7e970..adc7f8da184 100644 --- a/processing/src/main/java/io/druid/query/metadata/metadata/SegmentMetadataQuery.java +++ b/processing/src/main/java/io/druid/query/metadata/metadata/SegmentMetadataQuery.java @@ -24,7 +24,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonValue; import com.google.common.base.Preconditions; import com.google.common.collect.Lists; -import io.druid.common.utils.JodaUtils; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.StringUtils; import io.druid.query.BaseQuery; import io.druid.query.DataSource; @@ -39,7 +39,6 @@ import io.druid.query.spec.QuerySegmentSpec; import org.joda.time.Interval; import java.nio.ByteBuffer; -import java.util.Collections; import java.util.EnumSet; import java.util.List; import java.util.Map; @@ -54,6 +53,8 @@ public class SegmentMetadataQuery extends BaseQuery */ public static final byte[] ANALYSIS_TYPES_CACHE_PREFIX = new byte[] {(byte) 0xFF}; + private static final QuerySegmentSpec DEFAULT_SEGMENT_SPEC = new MultipleIntervalSegmentSpec(Intervals.ONLY_ETERNITY); + public enum AnalysisType { CARDINALITY, @@ -84,10 +85,6 @@ public class SegmentMetadataQuery extends BaseQuery } } - public static final Interval DEFAULT_INTERVAL = new Interval( - JodaUtils.MIN_INSTANT, JodaUtils.MAX_INSTANT - ); - private final ColumnIncluderator toInclude; private final boolean merge; private final boolean usingDefaultInterval; @@ -106,13 +103,7 @@ public class SegmentMetadataQuery extends BaseQuery @JsonProperty("lenientAggregatorMerge") Boolean lenientAggregatorMerge ) { - super( - dataSource, - (querySegmentSpec == null) ? new MultipleIntervalSegmentSpec(Collections.singletonList(DEFAULT_INTERVAL)) - : querySegmentSpec, - false, - context - ); + super(dataSource, querySegmentSpec == null ? 
DEFAULT_SEGMENT_SPEC : querySegmentSpec, false, context); if (querySegmentSpec == null) { this.usingDefaultInterval = true; diff --git a/processing/src/main/java/io/druid/query/search/SearchQueryQueryToolChest.java b/processing/src/main/java/io/druid/query/search/SearchQueryQueryToolChest.java index ec932823fc0..6e9aa50956a 100644 --- a/processing/src/main/java/io/druid/query/search/SearchQueryQueryToolChest.java +++ b/processing/src/main/java/io/druid/query/search/SearchQueryQueryToolChest.java @@ -30,6 +30,7 @@ import com.google.common.collect.Maps; import com.google.common.collect.Ordering; import com.google.common.primitives.Ints; import com.google.inject.Inject; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.IAE; import io.druid.java.util.common.ISE; import io.druid.java.util.common.guava.Sequence; @@ -54,7 +55,6 @@ import io.druid.query.filter.DimFilter; import io.druid.query.search.search.SearchHit; import io.druid.query.search.search.SearchQuery; import io.druid.query.search.search.SearchQueryConfig; -import org.joda.time.DateTime; import javax.annotation.Nullable; import java.nio.ByteBuffer; @@ -253,7 +253,7 @@ public class SearchQueryQueryToolChest extends QueryToolChest( - new DateTime(((Number) result.get(0)).longValue()), + DateTimes.utc(((Number) result.get(0)).longValue()), new SearchResultValue( Lists.transform( (List) result.get(1), @@ -279,7 +279,7 @@ public class SearchQueryQueryToolChest extends QueryToolChest( - new DateTime(((Number) result.get(0)).longValue()), + DateTimes.utc(((Number) result.get(0)).longValue()), new SearchResultValue( Lists.transform( (List) result.get(1), diff --git a/processing/src/main/java/io/druid/query/select/EventHolder.java b/processing/src/main/java/io/druid/query/select/EventHolder.java index 9120b2075df..b738d403059 100644 --- a/processing/src/main/java/io/druid/query/select/EventHolder.java +++ b/processing/src/main/java/io/druid/query/select/EventHolder.java @@ -22,6 +22,7 @@ package io.druid.query.select; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.collect.Maps; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.ISE; import org.joda.time.DateTime; @@ -52,8 +53,10 @@ public class EventHolder public DateTime getTimestamp() { Object retVal = event.get(timestampKey); - if (retVal instanceof String || retVal instanceof Long) { - return new DateTime(retVal); + if (retVal instanceof Long) { + return DateTimes.utc((Long) retVal); + } else if (retVal instanceof String) { + return DateTimes.of((String) retVal); } else if (retVal instanceof DateTime) { return (DateTime) retVal; } else { diff --git a/processing/src/main/java/io/druid/query/select/SelectQueryEngine.java b/processing/src/main/java/io/druid/query/select/SelectQueryEngine.java index 85099f18ba5..36ae0de6ddf 100644 --- a/processing/src/main/java/io/druid/query/select/SelectQueryEngine.java +++ b/processing/src/main/java/io/druid/query/select/SelectQueryEngine.java @@ -26,16 +26,17 @@ import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.inject.Inject; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.IAE; import io.druid.java.util.common.ISE; import io.druid.java.util.common.guava.Sequence; import io.druid.query.ColumnSelectorPlus; import io.druid.query.QueryRunnerHelper; import io.druid.query.Result; -import 
io.druid.query.dimension.DefaultDimensionSpec; -import io.druid.query.dimension.DimensionSpec; import io.druid.query.dimension.ColumnSelectorStrategy; import io.druid.query.dimension.ColumnSelectorStrategyFactory; +import io.druid.query.dimension.DefaultDimensionSpec; +import io.druid.query.dimension.DimensionSpec; import io.druid.query.filter.Filter; import io.druid.segment.ColumnValueSelector; import io.druid.segment.Cursor; @@ -53,7 +54,6 @@ import io.druid.segment.column.ValueType; import io.druid.segment.data.IndexedInts; import io.druid.segment.filter.Filters; import io.druid.timeline.DataSegmentUtils; -import org.joda.time.DateTime; import org.joda.time.Interval; import java.util.ArrayList; @@ -300,7 +300,7 @@ public class SelectQueryEngine ) { final Map theEvent = Maps.newLinkedHashMap(); - theEvent.put(timestampKey, new DateTime(timestampColumnSelector.getLong())); + theEvent.put(timestampKey, DateTimes.utc(timestampColumnSelector.getLong())); for (ColumnSelectorPlus selectorPlus : selectorPlusList) { selectorPlus.getColumnSelectorStrategy().addRowValuesToSelectResult(selectorPlus.getOutputName(), selectorPlus.getSelector(), theEvent); diff --git a/processing/src/main/java/io/druid/query/spec/LegacySegmentSpec.java b/processing/src/main/java/io/druid/query/spec/LegacySegmentSpec.java index 46a0034e781..9fe23e10b3c 100644 --- a/processing/src/main/java/io/druid/query/spec/LegacySegmentSpec.java +++ b/processing/src/main/java/io/druid/query/spec/LegacySegmentSpec.java @@ -20,15 +20,15 @@ package io.druid.query.spec; import com.fasterxml.jackson.annotation.JsonCreator; -import com.google.common.base.Function; -import com.google.common.collect.Lists; import io.druid.java.util.common.IAE; import org.joda.time.Interval; +import org.joda.time.chrono.ISOChronology; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Map; +import java.util.stream.Collectors; /** */ @@ -49,17 +49,10 @@ public class LegacySegmentSpec extends MultipleIntervalSegmentSpec throw new IAE("Unknown type[%s] for intervals[%s]", intervals.getClass(), intervals); } - return Lists.transform( - intervalStringList, - new Function() - { - @Override - public Interval apply(Object input) - { - return new Interval(input); - } - } - ); + return intervalStringList + .stream() + .map(input -> new Interval(input, ISOChronology.getInstanceUTC())) + .collect(Collectors.toList()); } @JsonCreator diff --git a/processing/src/main/java/io/druid/query/spec/MultipleIntervalSegmentSpec.java b/processing/src/main/java/io/druid/query/spec/MultipleIntervalSegmentSpec.java index 92a57cc4966..888a4c8e4ea 100644 --- a/processing/src/main/java/io/druid/query/spec/MultipleIntervalSegmentSpec.java +++ b/processing/src/main/java/io/druid/query/spec/MultipleIntervalSegmentSpec.java @@ -21,7 +21,7 @@ package io.druid.query.spec; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; -import io.druid.common.utils.JodaUtils; +import io.druid.java.util.common.JodaUtils; import io.druid.query.Query; import io.druid.query.QueryRunner; import io.druid.query.QuerySegmentWalker; diff --git a/processing/src/main/java/io/druid/query/spec/MultipleSpecificSegmentSpec.java b/processing/src/main/java/io/druid/query/spec/MultipleSpecificSegmentSpec.java index 01a05fb15c1..20f6d36b14b 100644 --- a/processing/src/main/java/io/druid/query/spec/MultipleSpecificSegmentSpec.java +++ b/processing/src/main/java/io/druid/query/spec/MultipleSpecificSegmentSpec.java @@ -23,7 +23,7 
@@ import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Function; import com.google.common.collect.Iterables; -import io.druid.common.utils.JodaUtils; +import io.druid.java.util.common.JodaUtils; import io.druid.query.Query; import io.druid.query.QueryRunner; import io.druid.query.QuerySegmentWalker; diff --git a/processing/src/main/java/io/druid/query/timeboundary/TimeBoundaryQuery.java b/processing/src/main/java/io/druid/query/timeboundary/TimeBoundaryQuery.java index e97f7cdc11b..ce7d2f247c6 100644 --- a/processing/src/main/java/io/druid/query/timeboundary/TimeBoundaryQuery.java +++ b/processing/src/main/java/io/druid/query/timeboundary/TimeBoundaryQuery.java @@ -23,7 +23,8 @@ import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.collect.Lists; import com.google.common.collect.Maps; -import io.druid.common.utils.JodaUtils; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.StringUtils; import io.druid.query.BaseQuery; import io.druid.query.DataSource; @@ -34,10 +35,8 @@ import io.druid.query.filter.DimFilter; import io.druid.query.spec.MultipleIntervalSegmentSpec; import io.druid.query.spec.QuerySegmentSpec; import org.joda.time.DateTime; -import org.joda.time.Interval; import java.nio.ByteBuffer; -import java.util.Collections; import java.util.List; import java.util.Map; @@ -45,10 +44,7 @@ import java.util.Map; */ public class TimeBoundaryQuery extends BaseQuery> { - public static final Interval MY_Y2K_INTERVAL = new Interval( - new DateTime("0000-01-01"), - new DateTime("3000-01-01") - ); + private static final QuerySegmentSpec DEFAULT_SEGMENT_SPEC = new MultipleIntervalSegmentSpec(Intervals.ONLY_ETERNITY); public static final String MAX_TIME = "maxTime"; public static final String MIN_TIME = "minTime"; @@ -66,13 +62,7 @@ public class TimeBoundaryQuery extends BaseQuery @JsonProperty("context") Map context ) { - super( - dataSource, - (querySegmentSpec == null) ? new MultipleIntervalSegmentSpec(Collections.singletonList(MY_Y2K_INTERVAL)) - : querySegmentSpec, - false, - context - ); + super(dataSource, querySegmentSpec == null ? DEFAULT_SEGMENT_SPEC : querySegmentSpec, false, context); this.dimFilter = dimFilter; this.bound = bound == null ? 
"" : bound; @@ -159,8 +149,8 @@ public class TimeBoundaryQuery extends BaseQuery return Lists.newArrayList(); } - DateTime min = new DateTime(JodaUtils.MAX_INSTANT); - DateTime max = new DateTime(JodaUtils.MIN_INSTANT); + DateTime min = DateTimes.MAX; + DateTime max = DateTimes.MIN; for (Result result : results) { TimeBoundaryResultValue val = result.getValue(); diff --git a/processing/src/main/java/io/druid/query/timeboundary/TimeBoundaryQueryQueryToolChest.java b/processing/src/main/java/io/druid/query/timeboundary/TimeBoundaryQueryQueryToolChest.java index c53c05ebcc2..a36461d2234 100644 --- a/processing/src/main/java/io/druid/query/timeboundary/TimeBoundaryQueryQueryToolChest.java +++ b/processing/src/main/java/io/druid/query/timeboundary/TimeBoundaryQueryQueryToolChest.java @@ -27,6 +27,7 @@ import com.google.common.base.Predicate; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.inject.Inject; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; import io.druid.query.BySegmentSkippingQueryRunner; @@ -41,7 +42,6 @@ import io.druid.query.QueryToolChest; import io.druid.query.Result; import io.druid.query.aggregation.MetricManipulationFn; import io.druid.timeline.LogicalSegment; -import org.joda.time.DateTime; import java.nio.ByteBuffer; import java.util.List; @@ -197,7 +197,7 @@ public class TimeBoundaryQueryQueryToolChest List result = (List) input; return new Result<>( - new DateTime(((Number) result.get(0)).longValue()), + DateTimes.utc(((Number) result.get(0)).longValue()), new TimeBoundaryResultValue(result.get(1)) ); } diff --git a/processing/src/main/java/io/druid/query/timeboundary/TimeBoundaryQueryRunnerFactory.java b/processing/src/main/java/io/druid/query/timeboundary/TimeBoundaryQueryRunnerFactory.java index d2498ecc54f..fbeba284998 100644 --- a/processing/src/main/java/io/druid/query/timeboundary/TimeBoundaryQueryRunnerFactory.java +++ b/processing/src/main/java/io/druid/query/timeboundary/TimeBoundaryQueryRunnerFactory.java @@ -22,6 +22,7 @@ package io.druid.query.timeboundary; import com.google.common.base.Function; import com.google.common.collect.Lists; import com.google.inject.Inject; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.ISE; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.BaseSequence; @@ -103,7 +104,7 @@ public class TimeBoundaryQueryRunnerFactory return null; } final LongColumnSelector timestampColumnSelector = cursor.makeLongColumnSelector(Column.TIME_COLUMN_NAME); - final DateTime timestamp = new DateTime(timestampColumnSelector.getLong()); + final DateTime timestamp = DateTimes.utc(timestampColumnSelector.getLong()); return new Result<>(adapter.getInterval().getStart(), timestamp); } }; diff --git a/processing/src/main/java/io/druid/query/timeboundary/TimeBoundaryResultValue.java b/processing/src/main/java/io/druid/query/timeboundary/TimeBoundaryResultValue.java index 18982199787..732f768335d 100644 --- a/processing/src/main/java/io/druid/query/timeboundary/TimeBoundaryResultValue.java +++ b/processing/src/main/java/io/druid/query/timeboundary/TimeBoundaryResultValue.java @@ -21,9 +21,11 @@ package io.druid.query.timeboundary; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonValue; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.IAE; import 
org.joda.time.DateTime; +import javax.annotation.Nullable; import java.util.Map; /** @@ -46,6 +48,7 @@ public class TimeBoundaryResultValue return value; } + @Nullable public DateTime getMaxTime() { if (value instanceof Map) { @@ -55,6 +58,7 @@ public class TimeBoundaryResultValue } } + @Nullable public DateTime getMinTime() { if (value instanceof Map) { @@ -97,7 +101,8 @@ public class TimeBoundaryResultValue '}'; } - private DateTime getDateTimeValue(Object val) + @Nullable + private DateTime getDateTimeValue(@Nullable Object val) { if (val == null) { return null; @@ -105,8 +110,10 @@ public class TimeBoundaryResultValue if (val instanceof DateTime) { return (DateTime) val; - } else if (val instanceof String || val instanceof Long) { - return new DateTime(val); + } else if (val instanceof String) { + return DateTimes.of((String) val); + } else if (val instanceof Long) { + return DateTimes.utc((Long) val); } else { throw new IAE("Cannot get time from type[%s]", val.getClass()); } diff --git a/processing/src/main/java/io/druid/segment/IndexIO.java b/processing/src/main/java/io/druid/segment/IndexIO.java index 8057f36eba3..494b556fd3c 100644 --- a/processing/src/main/java/io/druid/segment/IndexIO.java +++ b/processing/src/main/java/io/druid/segment/IndexIO.java @@ -39,6 +39,7 @@ import io.druid.common.utils.SerializerUtils; import io.druid.java.util.common.IAE; import io.druid.java.util.common.IOE; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.io.smoosh.Smoosh; import io.druid.java.util.common.io.smoosh.SmooshedFileMapper; @@ -335,7 +336,7 @@ public class IndexIO GenericIndexed.STRING_STRATEGY, smooshedFiles ); - final Interval dataInterval = new Interval(serializerUtils.readString(indexBuffer)); + final Interval dataInterval = Intervals.of(serializerUtils.readString(indexBuffer)); final BitmapSerdeFactory bitmapSerdeFactory = new BitmapSerde.LegacyBitmapSerdeFactory(); CompressedLongsIndexedSupplier timestamps = CompressedLongsIndexedSupplier.fromByteBuffer( @@ -558,7 +559,7 @@ public class IndexIO GenericIndexed.STRING_STRATEGY, smooshedFiles ); - final Interval dataInterval = new Interval(indexBuffer.getLong(), indexBuffer.getLong()); + final Interval dataInterval = Intervals.utc(indexBuffer.getLong(), indexBuffer.getLong()); final BitmapSerdeFactory segmentBitmapSerdeFactory; /** diff --git a/processing/src/main/java/io/druid/segment/IndexMergerV9.java b/processing/src/main/java/io/druid/segment/IndexMergerV9.java index 8a9868fed1d..af59c1522d7 100644 --- a/processing/src/main/java/io/druid/segment/IndexMergerV9.java +++ b/processing/src/main/java/io/druid/segment/IndexMergerV9.java @@ -34,7 +34,8 @@ import com.google.common.primitives.Ints; import com.google.common.primitives.Longs; import com.google.inject.Inject; import io.druid.collections.CombiningIterable; -import io.druid.common.utils.JodaUtils; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.JodaUtils; import io.druid.io.ZeroCopyByteArrayOutputStream; import io.druid.java.util.common.IAE; import io.druid.java.util.common.ISE; @@ -320,8 +321,8 @@ public class IndexMergerV9 implements IndexMerger cols.writeToChannel(writer); dims.writeToChannel(writer); - DateTime minTime = new DateTime(JodaUtils.MAX_INSTANT); - DateTime maxTime = new DateTime(JodaUtils.MIN_INSTANT); + DateTime minTime = DateTimes.MAX; + DateTime maxTime = DateTimes.MIN; for (IndexableAdapter index : adapters) { minTime 
= JodaUtils.minDateTime(minTime, index.getDataInterval().getStart()); @@ -663,14 +664,14 @@ public class IndexMergerV9 implements IndexMerger throw new IAE("Trying to persist an empty index!"); } - final long firstTimestamp = index.getMinTime().getMillis(); - final long lastTimestamp = index.getMaxTime().getMillis(); + final DateTime firstTimestamp = index.getMinTime(); + final DateTime lastTimestamp = index.getMaxTime(); if (!(dataInterval.contains(firstTimestamp) && dataInterval.contains(lastTimestamp))) { throw new IAE( "interval[%s] does not encapsulate the full range of timestamps[%s, %s]", dataInterval, - new DateTime(firstTimestamp), - new DateTime(lastTimestamp) + firstTimestamp, + lastTimestamp ); } diff --git a/processing/src/main/java/io/druid/segment/QueryableIndexStorageAdapter.java b/processing/src/main/java/io/druid/segment/QueryableIndexStorageAdapter.java index 1a5eb25e038..32a8451fc89 100644 --- a/processing/src/main/java/io/druid/segment/QueryableIndexStorageAdapter.java +++ b/processing/src/main/java/io/druid/segment/QueryableIndexStorageAdapter.java @@ -25,6 +25,7 @@ import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Sets; import io.druid.collections.bitmap.ImmutableBitmap; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.granularity.Granularity; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; @@ -126,7 +127,7 @@ public class QueryableIndexStorageAdapter implements StorageAdapter public DateTime getMinTime() { try (final GenericColumn column = index.getColumn(Column.TIME_COLUMN_NAME).getGenericColumn()) { - return new DateTime(column.getLongSingleValueRow(0)); + return DateTimes.utc(column.getLongSingleValueRow(0)); } } @@ -134,7 +135,7 @@ public class QueryableIndexStorageAdapter implements StorageAdapter public DateTime getMaxTime() { try (final GenericColumn column = index.getColumn(Column.TIME_COLUMN_NAME).getGenericColumn()) { - return new DateTime(column.getLongSingleValueRow(column.length() - 1)); + return DateTimes.utc(column.getLongSingleValueRow(column.length() - 1)); } } @@ -206,12 +207,11 @@ public class QueryableIndexStorageAdapter implements StorageAdapter { Interval actualInterval = interval; - long minDataTimestamp = getMinTime().getMillis(); - long maxDataTimestamp = getMaxTime().getMillis(); - final Interval dataInterval = new Interval( - minDataTimestamp, - gran.bucketEnd(getMaxTime()).getMillis() - ); + DateTime minTime = getMinTime(); + long minDataTimestamp = minTime.getMillis(); + DateTime maxTime = getMaxTime(); + long maxDataTimestamp = maxTime.getMillis(); + final Interval dataInterval = new Interval(minTime, gran.bucketEnd(maxTime)); if (!actualInterval.overlaps(dataInterval)) { return Sequences.empty(); diff --git a/processing/src/main/java/io/druid/segment/Rowboat.java b/processing/src/main/java/io/druid/segment/Rowboat.java index 6d66669f75d..1ef2dbf3aa1 100644 --- a/processing/src/main/java/io/druid/segment/Rowboat.java +++ b/processing/src/main/java/io/druid/segment/Rowboat.java @@ -21,10 +21,10 @@ package io.druid.segment; import com.google.common.primitives.Ints; import com.google.common.primitives.Longs; +import io.druid.java.util.common.DateTimes; import it.unimi.dsi.fastutil.ints.Int2ObjectOpenHashMap; import it.unimi.dsi.fastutil.ints.IntRBTreeSet; import it.unimi.dsi.fastutil.ints.IntSortedSet; -import org.joda.time.DateTime; import java.util.Arrays; @@ -132,7 +132,7 @@ public class Rowboat 
implements Comparable public String toString() { return "Rowboat{" + - "timestamp=" + new DateTime(timestamp).toString() + + "timestamp=" + DateTimes.utc(timestamp) + ", dims=" + Arrays.deepToString(dims) + ", metrics=" + Arrays.toString(metrics) + ", comprisedRows=" + comprisedRows + diff --git a/processing/src/main/java/io/druid/segment/data/BlockLayoutIndexedDoubleSupplier.java b/processing/src/main/java/io/druid/segment/data/BlockLayoutIndexedDoubleSupplier.java index f64ae27a90e..49508315640 100644 --- a/processing/src/main/java/io/druid/segment/data/BlockLayoutIndexedDoubleSupplier.java +++ b/processing/src/main/java/io/druid/segment/data/BlockLayoutIndexedDoubleSupplier.java @@ -22,6 +22,7 @@ package io.druid.segment.data; import com.google.common.base.Supplier; import com.google.common.primitives.Doubles; import io.druid.collections.ResourceHolder; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.guava.CloseQuietly; import io.druid.java.util.common.io.smoosh.SmooshedFileMapper; @@ -105,7 +106,7 @@ public class BlockLayoutIndexedDoubleSupplier implements Supplier implements Iterable, { row = formatRow(row); if (row.getTimestampFromEpoch() < minTimestamp) { - throw new IAE("Cannot add row[%s] because it is below the minTimestamp[%s]", row, new DateTime(minTimestamp)); + throw new IAE("Cannot add row[%s] because it is below the minTimestamp[%s]", row, DateTimes.utc(minTimestamp)); } final List rowDimensions = row.getDimensions(); @@ -682,17 +683,20 @@ public abstract class IncrementalIndex implements Iterable, public Interval getInterval() { - return new Interval(minTimestamp, isEmpty() ? minTimestamp : gran.increment(new DateTime(getMaxTimeMillis())).getMillis()); + DateTime min = DateTimes.utc(minTimestamp); + return new Interval(min, isEmpty() ? min : gran.increment(DateTimes.utc(getMaxTimeMillis()))); } + @Nullable public DateTime getMinTime() { - return isEmpty() ? null : new DateTime(getMinTimeMillis()); + return isEmpty() ? null : DateTimes.utc(getMinTimeMillis()); } + @Nullable public DateTime getMaxTime() { - return isEmpty() ? null : new DateTime(getMaxTimeMillis()); + return isEmpty() ? 
null : DateTimes.utc(getMaxTimeMillis()); } public Integer getDimensionIndex(String dimension) @@ -1010,7 +1014,7 @@ public abstract class IncrementalIndex implements Iterable, public String toString() { return "TimeAndDims{" + - "timestamp=" + new DateTime(timestamp) + + "timestamp=" + DateTimes.utc(timestamp) + ", dims=" + Lists.transform( Arrays.asList(dims), new Function() { diff --git a/processing/src/main/java/io/druid/segment/incremental/IncrementalIndexStorageAdapter.java b/processing/src/main/java/io/druid/segment/incremental/IncrementalIndexStorageAdapter.java index b747df94075..e23b0e4d0fb 100644 --- a/processing/src/main/java/io/druid/segment/incremental/IncrementalIndexStorageAdapter.java +++ b/processing/src/main/java/io/druid/segment/incremental/IncrementalIndexStorageAdapter.java @@ -208,10 +208,7 @@ public class IncrementalIndexStorageAdapter implements StorageAdapter Interval actualIntervalTmp = interval; - final Interval dataInterval = new Interval( - getMinTime().getMillis(), - gran.bucketEnd(getMaxTime()).getMillis() - ); + final Interval dataInterval = new Interval(getMinTime(), gran.bucketEnd(getMaxTime())); if (!actualIntervalTmp.overlaps(dataInterval)) { return Sequences.empty(); diff --git a/processing/src/test/java/io/druid/collections/CombiningIterableTest.java b/processing/src/test/java/io/druid/collections/CombiningIterableTest.java index 02e88dc77f7..b60efee729a 100644 --- a/processing/src/test/java/io/druid/collections/CombiningIterableTest.java +++ b/processing/src/test/java/io/druid/collections/CombiningIterableTest.java @@ -19,9 +19,9 @@ package io.druid.collections; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.guava.nary.BinaryFn; import io.druid.query.Result; -import org.joda.time.DateTime; import org.junit.Assert; import org.junit.Test; @@ -38,12 +38,12 @@ public class CombiningIterableTest public void testMerge() { List> resultsBefore = Arrays.asList( - new Result(new DateTime("2011-01-01"), 1L), - new Result(new DateTime("2011-01-01"), 2L) + new Result(DateTimes.of("2011-01-01"), 1L), + new Result(DateTimes.of("2011-01-01"), 2L) ); Iterable> expectedResults = Arrays.>asList( - new Result(new DateTime("2011-01-01"), 3L) + new Result(DateTimes.of("2011-01-01"), 3L) ); Iterable> resultsAfter = CombiningIterable.create( diff --git a/processing/src/test/java/io/druid/granularity/QueryGranularityTest.java b/processing/src/test/java/io/druid/granularity/QueryGranularityTest.java index b7466914745..166381ce95b 100644 --- a/processing/src/test/java/io/druid/granularity/QueryGranularityTest.java +++ b/processing/src/test/java/io/druid/granularity/QueryGranularityTest.java @@ -25,6 +25,8 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.DurationGranularity; import io.druid.java.util.common.granularity.Granularities; @@ -56,7 +58,7 @@ public class QueryGranularityTest @Test public void testIterableNone() throws Exception { - final Iterator iterator = Granularities.NONE.getIterable(new Interval(0, 1000)).iterator(); + final Iterator iterator = Granularities.NONE.getIterable(Intervals.utc(0, 1000)).iterator(); int count = 0; while (iterator.hasNext()) { Assert.assertEquals(count, iterator.next().getStartMillis()); @@ -67,252 
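The Interval changes above and in the QueryGranularityTest hunks that follow repeat one pattern: the millis- and string-based Interval constructors, which can silently pick up the JVM default time zone, give way to Intervals.utc(long, long), Intervals.of(String), or new Interval(DateTime, DateTime) built from UTC DateTimes. A rough sketch of the three replacement forms as they are used in these hunks (illustrative only, not part of the patch):

import io.druid.java.util.common.DateTimes;
import io.druid.java.util.common.Intervals;
import org.joda.time.DateTime;
import org.joda.time.Interval;

public class IntervalsSketch
{
  public static void main(String[] args)
  {
    Interval fromMillis = Intervals.utc(0, 1000);                       // was new Interval(0, 1000)
    Interval fromString = Intervals.of("2011-04-02T00:00:00.000Z/P1D"); // was new Interval("2011-04-02T00:00:00.000Z/P1D")
    DateTime start = DateTimes.of("2011-01-01T09:38:00.000Z");
    Interval fromDateTimes = new Interval(start, start.plusMinutes(3)); // was new Interval(start.getMillis(), end.getMillis())
    System.out.println(fromMillis + " " + fromString + " " + fromDateTimes);
  }
}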
+69,246 @@ public class QueryGranularityTest @Test public void testIterableMinuteSimple() throws Exception { - final DateTime baseTime = new DateTime("2011-01-01T09:38:00.000Z"); + final DateTime baseTime = DateTimes.of("2011-01-01T09:38:00.000Z"); assertSameInterval( Lists.newArrayList( - new DateTime("2011-01-01T09:38:00.000Z"), - new DateTime("2011-01-01T09:39:00.000Z"), - new DateTime("2011-01-01T09:40:00.000Z") + DateTimes.of("2011-01-01T09:38:00.000Z"), + DateTimes.of("2011-01-01T09:39:00.000Z"), + DateTimes.of("2011-01-01T09:40:00.000Z") ), - Granularities.MINUTE.getIterable(new Interval(baseTime.getMillis(), baseTime.plus(Minutes.THREE).getMillis())) + Granularities.MINUTE.getIterable(new Interval(baseTime, baseTime.plus(Minutes.THREE))) ); } @Test public void testIterableMinuteComplex() throws Exception { - final DateTime baseTime = new DateTime("2011-01-01T09:38:02.992Z"); + final DateTime baseTime = DateTimes.of("2011-01-01T09:38:02.992Z"); assertSameInterval( Lists.newArrayList( - new DateTime("2011-01-01T09:38:00.000Z"), - new DateTime("2011-01-01T09:39:00.000Z"), - new DateTime("2011-01-01T09:40:00.000Z"), - new DateTime("2011-01-01T09:41:00.000Z") + DateTimes.of("2011-01-01T09:38:00.000Z"), + DateTimes.of("2011-01-01T09:39:00.000Z"), + DateTimes.of("2011-01-01T09:40:00.000Z"), + DateTimes.of("2011-01-01T09:41:00.000Z") ), - Granularities.MINUTE.getIterable(new Interval(baseTime.getMillis(), baseTime.plus(Minutes.THREE).getMillis())) + Granularities.MINUTE.getIterable(new Interval(baseTime, baseTime.plus(Minutes.THREE))) ); } @Test public void testIterable15MinuteSimple() throws Exception { - final DateTime baseTime = new DateTime("2011-01-01T09:30:00.000Z"); + final DateTime baseTime = DateTimes.of("2011-01-01T09:30:00.000Z"); assertSameInterval( Lists.newArrayList( - new DateTime("2011-01-01T09:30:00.000Z"), - new DateTime("2011-01-01T09:45:00.000Z"), - new DateTime("2011-01-01T10:00:00.000Z") + DateTimes.of("2011-01-01T09:30:00.000Z"), + DateTimes.of("2011-01-01T09:45:00.000Z"), + DateTimes.of("2011-01-01T10:00:00.000Z") ), - Granularities.FIFTEEN_MINUTE.getIterable( - new Interval( - baseTime.getMillis(), baseTime.plus(Minutes.minutes(45)).getMillis() - )) + Granularities.FIFTEEN_MINUTE.getIterable(new Interval(baseTime, baseTime.plus(Minutes.minutes(45)))) ); } @Test public void testIterable15MinuteComplex() throws Exception { - final DateTime baseTime = new DateTime("2011-01-01T09:38:02.992Z"); + final DateTime baseTime = DateTimes.of("2011-01-01T09:38:02.992Z"); assertSameInterval( Lists.newArrayList( - new DateTime("2011-01-01T09:30:00.000Z"), - new DateTime("2011-01-01T09:45:00.000Z"), - new DateTime("2011-01-01T10:00:00.000Z"), - new DateTime("2011-01-01T10:15:00.000Z") + DateTimes.of("2011-01-01T09:30:00.000Z"), + DateTimes.of("2011-01-01T09:45:00.000Z"), + DateTimes.of("2011-01-01T10:00:00.000Z"), + DateTimes.of("2011-01-01T10:15:00.000Z") ), - Granularities.FIFTEEN_MINUTE.getIterable(new Interval( - baseTime.getMillis(), - baseTime.plus(Minutes.minutes(45)).getMillis() - )) + Granularities.FIFTEEN_MINUTE.getIterable(new Interval(baseTime, baseTime.plus(Minutes.minutes(45)))) ); } @Test public void testIterableHourSimple() throws Exception { - final DateTime baseTime = new DateTime("2011-01-01T09:00:00.000Z"); + final DateTime baseTime = DateTimes.of("2011-01-01T09:00:00.000Z"); assertSameInterval( Lists.newArrayList( - new DateTime("2011-01-01T09:00:00.000Z"), - new DateTime("2011-01-01T10:00:00.000Z"), - new DateTime("2011-01-01T11:00:00.000Z") - ), 
Granularities.HOUR.getIterable(new Interval(baseTime.getMillis(), baseTime.plus(Hours.hours(3)).getMillis())) + DateTimes.of("2011-01-01T09:00:00.000Z"), + DateTimes.of("2011-01-01T10:00:00.000Z"), + DateTimes.of("2011-01-01T11:00:00.000Z") + ), Granularities.HOUR.getIterable(new Interval(baseTime, baseTime.plus(Hours.hours(3)))) ); } @Test public void testIterableHourComplex() throws Exception { - final DateTime baseTime = new DateTime("2011-01-01T09:38:02.992Z"); + final DateTime baseTime = DateTimes.of("2011-01-01T09:38:02.992Z"); assertSameInterval( Lists.newArrayList( - new DateTime("2011-01-01T09:00:00.000Z"), - new DateTime("2011-01-01T10:00:00.000Z"), - new DateTime("2011-01-01T11:00:00.000Z"), - new DateTime("2011-01-01T12:00:00.000Z") - ), Granularities.HOUR.getIterable(new Interval(baseTime.getMillis(), baseTime.plus(Hours.hours(3)).getMillis())) + DateTimes.of("2011-01-01T09:00:00.000Z"), + DateTimes.of("2011-01-01T10:00:00.000Z"), + DateTimes.of("2011-01-01T11:00:00.000Z"), + DateTimes.of("2011-01-01T12:00:00.000Z") + ), Granularities.HOUR.getIterable(new Interval(baseTime, baseTime.plus(Hours.hours(3)))) ); } @Test public void testIterableDaySimple() throws Exception { - final DateTime baseTime = new DateTime("2011-01-01T00:00:00.000Z"); + final DateTime baseTime = DateTimes.of("2011-01-01T00:00:00.000Z"); assertSameInterval( Lists.newArrayList( - new DateTime("2011-01-01T00:00:00.000Z"), - new DateTime("2011-01-02T00:00:00.000Z"), - new DateTime("2011-01-03T00:00:00.000Z") + DateTimes.of("2011-01-01T00:00:00.000Z"), + DateTimes.of("2011-01-02T00:00:00.000Z"), + DateTimes.of("2011-01-03T00:00:00.000Z") ), - Granularities.DAY.getIterable(new Interval(baseTime.getMillis(), baseTime.plus(Days.days(3)).getMillis())) + Granularities.DAY.getIterable(new Interval(baseTime, baseTime.plus(Days.days(3)))) ); } @Test public void testIterableDayComplex() throws Exception { - final DateTime baseTime = new DateTime("2011-01-01T09:38:02.992Z"); + final DateTime baseTime = DateTimes.of("2011-01-01T09:38:02.992Z"); assertSameInterval( Lists.newArrayList( - new DateTime("2011-01-01T00:00:00.000Z"), - new DateTime("2011-01-02T00:00:00.000Z"), - new DateTime("2011-01-03T00:00:00.000Z"), - new DateTime("2011-01-04T00:00:00.000Z") + DateTimes.of("2011-01-01T00:00:00.000Z"), + DateTimes.of("2011-01-02T00:00:00.000Z"), + DateTimes.of("2011-01-03T00:00:00.000Z"), + DateTimes.of("2011-01-04T00:00:00.000Z") ), - Granularities.DAY.getIterable(new Interval(baseTime.getMillis(), baseTime.plus(Days.days(3)).getMillis())) + Granularities.DAY.getIterable(new Interval(baseTime, baseTime.plus(Days.days(3)))) ); } @Test public void testIterableWeekSimple() { - final DateTime baseTime = new DateTime("2011-01-03T00:00:00.000Z"); + final DateTime baseTime = DateTimes.of("2011-01-03T00:00:00.000Z"); assertSameInterval( Lists.newArrayList( - new DateTime("2011-01-03T00:00:00.000Z"), - new DateTime("2011-01-10T00:00:00.000Z"), - new DateTime("2011-01-17T00:00:00.000Z") + DateTimes.of("2011-01-03T00:00:00.000Z"), + DateTimes.of("2011-01-10T00:00:00.000Z"), + DateTimes.of("2011-01-17T00:00:00.000Z") ), - Granularities.WEEK.getIterable(new Interval(baseTime.getMillis(), baseTime.plus(Weeks.THREE).getMillis())) + Granularities.WEEK.getIterable(new Interval(baseTime, baseTime.plus(Weeks.THREE))) ); } @Test public void testIterableWeekComplex() { - final DateTime baseTime = new DateTime("2011-01-01T09:38:02.992Z"); + final DateTime baseTime = DateTimes.of("2011-01-01T09:38:02.992Z"); assertSameInterval( Lists.newArrayList( 
- new DateTime("2010-12-27T00:00:00.000Z"), - new DateTime("2011-01-03T00:00:00.000Z"), - new DateTime("2011-01-10T00:00:00.000Z"), - new DateTime("2011-01-17T00:00:00.000Z") + DateTimes.of("2010-12-27T00:00:00.000Z"), + DateTimes.of("2011-01-03T00:00:00.000Z"), + DateTimes.of("2011-01-10T00:00:00.000Z"), + DateTimes.of("2011-01-17T00:00:00.000Z") ), - Granularities.WEEK.getIterable(new Interval(baseTime.getMillis(), baseTime.plus(Weeks.THREE).getMillis())) + Granularities.WEEK.getIterable(new Interval(baseTime, baseTime.plus(Weeks.THREE))) ); } @Test public void testIterableMonthSimple() { - final DateTime baseTime = new DateTime("2011-01-01T00:00:00.000Z"); + final DateTime baseTime = DateTimes.of("2011-01-01T00:00:00.000Z"); assertSameInterval( Lists.newArrayList( - new DateTime("2011-01-01T00:00:00.000Z"), - new DateTime("2011-02-01T00:00:00.000Z"), - new DateTime("2011-03-01T00:00:00.000Z") + DateTimes.of("2011-01-01T00:00:00.000Z"), + DateTimes.of("2011-02-01T00:00:00.000Z"), + DateTimes.of("2011-03-01T00:00:00.000Z") ), - Granularities.MONTH.getIterable(new Interval(baseTime.getMillis(), baseTime.plus(Months.THREE).getMillis())) + Granularities.MONTH.getIterable(new Interval(baseTime, baseTime.plus(Months.THREE))) ); } @Test public void testIterableMonthComplex() { - final DateTime baseTime = new DateTime("2011-01-01T09:38:00.000Z"); + final DateTime baseTime = DateTimes.of("2011-01-01T09:38:00.000Z"); assertSameInterval( Lists.newArrayList( - new DateTime("2011-01-01T00:00:00.000Z"), - new DateTime("2011-02-01T00:00:00.000Z"), - new DateTime("2011-03-01T00:00:00.000Z"), - new DateTime("2011-04-01T00:00:00.000Z") + DateTimes.of("2011-01-01T00:00:00.000Z"), + DateTimes.of("2011-02-01T00:00:00.000Z"), + DateTimes.of("2011-03-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z") ), - Granularities.MONTH.getIterable(new Interval(baseTime.getMillis(), baseTime.plus(Months.THREE).getMillis())) + Granularities.MONTH.getIterable(new Interval(baseTime, baseTime.plus(Months.THREE))) ); } @Test public void testIterableQuarterSimple() { - final DateTime baseTime = new DateTime("2011-01-01T00:00:00.000Z"); + final DateTime baseTime = DateTimes.of("2011-01-01T00:00:00.000Z"); assertSameInterval( Lists.newArrayList( - new DateTime("2011-01-01T00:00:00.000Z"), - new DateTime("2011-04-01T00:00:00.000Z"), - new DateTime("2011-07-01T00:00:00.000Z") + DateTimes.of("2011-01-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-07-01T00:00:00.000Z") ), - Granularities.QUARTER.getIterable(new Interval(baseTime.getMillis(), baseTime.plus(Months.NINE).getMillis())) + Granularities.QUARTER.getIterable(new Interval(baseTime, baseTime.plus(Months.NINE))) ); } @Test public void testIterableQuarterComplex() { - final DateTime baseTime = new DateTime("2011-01-01T09:38:00.000Z"); + final DateTime baseTime = DateTimes.of("2011-01-01T09:38:00.000Z"); assertSameInterval( Lists.newArrayList( - new DateTime("2011-01-01T00:00:00.000Z"), - new DateTime("2011-04-01T00:00:00.000Z"), - new DateTime("2011-07-01T00:00:00.000Z"), - new DateTime("2011-10-01T00:00:00.000Z") + DateTimes.of("2011-01-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-07-01T00:00:00.000Z"), + DateTimes.of("2011-10-01T00:00:00.000Z") ), - Granularities.QUARTER.getIterable(new Interval(baseTime.getMillis(), baseTime.plus(Months.NINE).getMillis())) + Granularities.QUARTER.getIterable(new Interval(baseTime, baseTime.plus(Months.NINE))) ); } @Test public void 
testIterableYearSimple() { - final DateTime baseTime = new DateTime("2011-01-01T00:00:00.000Z"); + final DateTime baseTime = DateTimes.of("2011-01-01T00:00:00.000Z"); assertSameInterval( Lists.newArrayList( - new DateTime("2011-01-01T00:00:00.000Z"), - new DateTime("2012-01-01T00:00:00.000Z"), - new DateTime("2013-01-01T00:00:00.000Z") + DateTimes.of("2011-01-01T00:00:00.000Z"), + DateTimes.of("2012-01-01T00:00:00.000Z"), + DateTimes.of("2013-01-01T00:00:00.000Z") ), - Granularities.YEAR.getIterable(new Interval(baseTime.getMillis(), baseTime.plus(Years.THREE).getMillis())) + Granularities.YEAR.getIterable(new Interval(baseTime, baseTime.plus(Years.THREE))) ); } @Test public void testIterableYearComplex() { - final DateTime baseTime = new DateTime("2011-01-01T09:38:00.000Z"); + final DateTime baseTime = DateTimes.of("2011-01-01T09:38:00.000Z"); assertSameInterval( Lists.newArrayList( - new DateTime("2011-01-01T00:00:00.000Z"), - new DateTime("2012-01-01T00:00:00.000Z"), - new DateTime("2013-01-01T00:00:00.000Z"), - new DateTime("2014-01-01T00:00:00.000Z") + DateTimes.of("2011-01-01T00:00:00.000Z"), + DateTimes.of("2012-01-01T00:00:00.000Z"), + DateTimes.of("2013-01-01T00:00:00.000Z"), + DateTimes.of("2014-01-01T00:00:00.000Z") ), - Granularities.YEAR.getIterable(new Interval(baseTime.getMillis(), baseTime.plus(Years.THREE).getMillis())) + Granularities.YEAR.getIterable(new Interval(baseTime, baseTime.plus(Years.THREE))) ); } @@ -328,7 +324,7 @@ public class QueryGranularityTest new DateTime("2012-11-06T00:00:00.000-08:00", tz) ), new PeriodGranularity(new Period("P1D"), null, tz) - .getIterable(new Interval(baseTime.getMillis(), baseTime.plus(Days.days(3)).getMillis())) + .getIterable(new Interval(baseTime, baseTime.plus(Days.days(3)))) ); assertSameInterval( @@ -340,7 +336,7 @@ public class QueryGranularityTest new DateTime("2012-11-04T03:00:00.000-08:00", tz) ), new PeriodGranularity(new Period("PT1H"), null, tz) - .getIterable(new Interval(baseTime.getMillis(), baseTime.plus(Hours.hours(5)).getMillis())) + .getIterable(new Interval(baseTime, baseTime.plus(Hours.hours(5)))) ); final PeriodGranularity hour = new PeriodGranularity(new Period("PT1H"), null, tz); @@ -353,11 +349,11 @@ public class QueryGranularityTest new DateTime("2012-11-04T03:00:00.000-08:00", tz) ), Lists.newArrayList( - hour.bucketStart(new DateTime("2012-11-04T00:30:00-07:00")), - hour.bucketStart(new DateTime("2012-11-04T01:30:00-07:00")), - hour.bucketStart(new DateTime("2012-11-04T01:30:00-08:00")), - hour.bucketStart(new DateTime("2012-11-04T02:30:00-08:00")), - hour.bucketStart(new DateTime("2012-11-04T03:30:00-08:00")) + hour.bucketStart(DateTimes.of("2012-11-04T00:30:00-07:00")), + hour.bucketStart(DateTimes.of("2012-11-04T01:30:00-07:00")), + hour.bucketStart(DateTimes.of("2012-11-04T01:30:00-08:00")), + hour.bucketStart(DateTimes.of("2012-11-04T02:30:00-08:00")), + hour.bucketStart(DateTimes.of("2012-11-04T03:30:00-08:00")) ) ); } @@ -375,7 +371,7 @@ public class QueryGranularityTest new DateTime("2013-02-01T00:00:00.000-08:00", tz) ), new PeriodGranularity(new Period("P1M"), null, tz) - .getIterable(new Interval(baseTime.getMillis(), baseTime.plus(Months.months(3)).getMillis())) + .getIterable(new Interval(baseTime, baseTime.plus(Months.months(3)))) ); } @@ -392,7 +388,7 @@ public class QueryGranularityTest new DateTime("2012-11-19T00:00:00.000-08:00", tz) ), new PeriodGranularity(new Period("P1W"), null, tz) - .getIterable(new Interval(baseTime.getMillis(), baseTime.plus(Weeks.weeks(3)).getMillis())) + 
.getIterable(new Interval(baseTime, baseTime.plus(Weeks.weeks(3)))) ); assertSameInterval( @@ -402,7 +398,7 @@ public class QueryGranularityTest new DateTime("2012-11-17T10:00:00.000-08:00", tz) ), new PeriodGranularity(new Period("P1W"), baseTime, tz) - .getIterable(new Interval(baseTime.getMillis(), baseTime.plus(Weeks.weeks(3)).getMillis())) + .getIterable(new Interval(baseTime, baseTime.plus(Weeks.weeks(3)))) ); } @@ -410,7 +406,7 @@ public class QueryGranularityTest public void testPeriodTruncateDays() throws Exception { final DateTimeZone tz = DateTimeZone.forID("America/Los_Angeles"); - final DateTime origin = new DateTime("2012-01-02T05:00:00.000-08:00"); + final DateTime origin = DateTimes.of("2012-01-02T05:00:00.000-08:00"); PeriodGranularity periodOrigin = new PeriodGranularity( new Period("P2D"), origin, @@ -423,9 +419,9 @@ public class QueryGranularityTest new DateTime("2012-01-04T05:00:00.000-08:00", tz) ), Lists.newArrayList( - periodOrigin.bucketStart(new DateTime("2012-01-01T05:00:04.123-08:00")), - periodOrigin.bucketStart(new DateTime("2012-01-02T07:00:04.123-08:00")), - periodOrigin.bucketStart(new DateTime("2012-01-04T07:20:04.123-08:00")) + periodOrigin.bucketStart(DateTimes.of("2012-01-01T05:00:04.123-08:00")), + periodOrigin.bucketStart(DateTimes.of("2012-01-02T07:00:04.123-08:00")), + periodOrigin.bucketStart(DateTimes.of("2012-01-04T07:20:04.123-08:00")) ) ); @@ -442,9 +438,9 @@ public class QueryGranularityTest new DateTime("2012-01-03T00:00:00.000-08:00", tz) ), Lists.newArrayList( - periodNoOrigin.bucketStart(new DateTime("2012-01-01T05:00:04.123-08:00")), - periodNoOrigin.bucketStart(new DateTime("2012-01-02T07:00:04.123-08:00")), - periodNoOrigin.bucketStart(new DateTime("2012-01-04T07:20:04.123-08:00")) + periodNoOrigin.bucketStart(DateTimes.of("2012-01-01T05:00:04.123-08:00")), + periodNoOrigin.bucketStart(DateTimes.of("2012-01-02T07:00:04.123-08:00")), + periodNoOrigin.bucketStart(DateTimes.of("2012-01-04T07:20:04.123-08:00")) ) ); @@ -453,18 +449,18 @@ public class QueryGranularityTest @Test public void testPeriodTruncateMinutes() throws Exception { - final DateTime origin = new DateTime("2012-01-02T00:05:00.000Z"); + final DateTime origin = DateTimes.of("2012-01-02T00:05:00.000Z"); PeriodGranularity periodOrigin = new PeriodGranularity(new Period("PT15M"), origin, null); assertSameDateTime( Lists.newArrayList( - new DateTime("2012-01-01T04:50:00.000Z"), - new DateTime("2012-01-02T07:05:00.000Z"), - new DateTime("2012-01-04T00:20:00.000Z") + DateTimes.of("2012-01-01T04:50:00.000Z"), + DateTimes.of("2012-01-02T07:05:00.000Z"), + DateTimes.of("2012-01-04T00:20:00.000Z") ), Lists.newArrayList( - periodOrigin.bucketStart(new DateTime("2012-01-01T05:00:04.123Z")), - periodOrigin.bucketStart(new DateTime("2012-01-02T07:08:04.123Z")), - periodOrigin.bucketStart(new DateTime("2012-01-04T00:20:04.123Z")) + periodOrigin.bucketStart(DateTimes.of("2012-01-01T05:00:04.123Z")), + periodOrigin.bucketStart(DateTimes.of("2012-01-02T07:08:04.123Z")), + periodOrigin.bucketStart(DateTimes.of("2012-01-04T00:20:04.123Z")) ) ); @@ -472,14 +468,14 @@ public class QueryGranularityTest PeriodGranularity periodNoOrigin = new PeriodGranularity(new Period("PT15M"), null, null); assertSameDateTime( Lists.newArrayList( - new DateTime("2012-01-01T05:00:00.000Z"), - new DateTime("2012-01-02T07:00:00.000Z"), - new DateTime("2012-01-04T00:15:00.000Z") + DateTimes.of("2012-01-01T05:00:00.000Z"), + DateTimes.of("2012-01-02T07:00:00.000Z"), + DateTimes.of("2012-01-04T00:15:00.000Z") ), 
Lists.newArrayList( - periodNoOrigin.bucketStart(new DateTime("2012-01-01T05:00:04.123Z")), - periodNoOrigin.bucketStart(new DateTime("2012-01-02T07:00:04.123Z")), - periodNoOrigin.bucketStart(new DateTime("2012-01-04T00:20:04.123Z")) + periodNoOrigin.bucketStart(DateTimes.of("2012-01-01T05:00:04.123Z")), + periodNoOrigin.bucketStart(DateTimes.of("2012-01-02T07:00:04.123Z")), + periodNoOrigin.bucketStart(DateTimes.of("2012-01-04T00:20:04.123Z")) ) ); @@ -490,7 +486,7 @@ public class QueryGranularityTest { { final DateTimeZone tz = DateTimeZone.forID("America/Los_Angeles"); - final DateTime origin = new DateTime("2012-01-02T05:00:00.000-08:00"); + final DateTime origin = DateTimes.of("2012-01-02T05:00:00.000-08:00"); PeriodGranularity periodOrigin = new PeriodGranularity( new Period("P1M2D"), origin, @@ -504,10 +500,10 @@ public class QueryGranularityTest new DateTime("2012-02-04T05:00:00.000-08:00", tz) ), Lists.newArrayList( - periodOrigin.bucketStart(new DateTime("2012-01-01T05:00:04.123-08:00")), - periodOrigin.bucketStart(new DateTime("2012-01-02T07:00:04.123-08:00")), - periodOrigin.bucketStart(new DateTime("2012-03-01T07:20:04.123-08:00")), - periodOrigin.bucketStart(new DateTime("2012-02-04T05:00:00.000-08:00")) + periodOrigin.bucketStart(DateTimes.of("2012-01-01T05:00:04.123-08:00")), + periodOrigin.bucketStart(DateTimes.of("2012-01-02T07:00:04.123-08:00")), + periodOrigin.bucketStart(DateTimes.of("2012-03-01T07:20:04.123-08:00")), + periodOrigin.bucketStart(DateTimes.of("2012-02-04T05:00:00.000-08:00")) ) ); @@ -524,10 +520,10 @@ public class QueryGranularityTest new DateTime("2012-02-16T00:00:00.000-08:00", tz) ), Lists.newArrayList( - periodNoOrigin.bucketStart(new DateTime("1970-01-01T05:02:04.123-08:00")), - periodNoOrigin.bucketStart(new DateTime("2012-01-01T05:02:04.123-08:00")), - periodNoOrigin.bucketStart(new DateTime("2012-01-15T07:01:04.123-08:00")), - periodNoOrigin.bucketStart(new DateTime("2012-02-16T00:00:00.000-08:00")) + periodNoOrigin.bucketStart(DateTimes.of("1970-01-01T05:02:04.123-08:00")), + periodNoOrigin.bucketStart(DateTimes.of("2012-01-01T05:02:04.123-08:00")), + periodNoOrigin.bucketStart(DateTimes.of("2012-01-15T07:01:04.123-08:00")), + periodNoOrigin.bucketStart(DateTimes.of("2012-02-16T00:00:00.000-08:00")) ) ); @@ -535,7 +531,7 @@ public class QueryGranularityTest { final DateTimeZone tz = DateTimeZone.forID("America/Los_Angeles"); - final DateTime origin = new DateTime("2012-01-02T05:00:00.000-08:00"); + final DateTime origin = DateTimes.of("2012-01-02T05:00:00.000-08:00"); PeriodGranularity periodOrigin = new PeriodGranularity( new Period("PT12H5M"), origin, @@ -549,10 +545,10 @@ public class QueryGranularityTest new DateTime("2012-02-03T22:25:00.000-08:00", tz) ), Lists.newArrayList( - periodOrigin.bucketStart(new DateTime("2012-01-01T05:00:04.123-08:00")), - periodOrigin.bucketStart(new DateTime("2012-01-02T07:00:04.123-08:00")), - periodOrigin.bucketStart(new DateTime("2012-01-03T00:20:04.123-08:00")), - periodOrigin.bucketStart(new DateTime("2012-02-03T22:25:00.000-08:00")) + periodOrigin.bucketStart(DateTimes.of("2012-01-01T05:00:04.123-08:00")), + periodOrigin.bucketStart(DateTimes.of("2012-01-02T07:00:04.123-08:00")), + periodOrigin.bucketStart(DateTimes.of("2012-01-03T00:20:04.123-08:00")), + periodOrigin.bucketStart(DateTimes.of("2012-02-03T22:25:00.000-08:00")) ) ); } @@ -562,7 +558,7 @@ public class QueryGranularityTest public void testCompoundPeriodMillisTruncate() throws Exception { { - final DateTime origin = new 
DateTime("2012-01-02T05:00:00.000-08:00"); + final DateTime origin = DateTimes.of("2012-01-02T05:00:00.000-08:00"); PeriodGranularity periodOrigin = new PeriodGranularity( new Period("PT12H5M"), origin, @@ -570,16 +566,16 @@ public class QueryGranularityTest ); assertSameDateTime( Lists.newArrayList( - new DateTime("2012-01-01T04:50:00.000-08:00"), - new DateTime("2012-01-02T05:00:00.000-08:00"), - new DateTime("2012-01-02T17:05:00.000-08:00"), - new DateTime("2012-02-03T22:25:00.000-08:00") + DateTimes.of("2012-01-01T04:50:00.000-08:00"), + DateTimes.of("2012-01-02T05:00:00.000-08:00"), + DateTimes.of("2012-01-02T17:05:00.000-08:00"), + DateTimes.of("2012-02-03T22:25:00.000-08:00") ), Lists.newArrayList( - periodOrigin.bucketStart(new DateTime("2012-01-01T05:00:04.123-08:00")), - periodOrigin.bucketStart(new DateTime("2012-01-02T07:00:04.123-08:00")), - periodOrigin.bucketStart(new DateTime("2012-01-03T00:20:04.123-08:00")), - periodOrigin.bucketStart(new DateTime("2012-02-03T22:25:00.000-08:00")) + periodOrigin.bucketStart(DateTimes.of("2012-01-01T05:00:04.123-08:00")), + periodOrigin.bucketStart(DateTimes.of("2012-01-02T07:00:04.123-08:00")), + periodOrigin.bucketStart(DateTimes.of("2012-01-03T00:20:04.123-08:00")), + periodOrigin.bucketStart(DateTimes.of("2012-02-03T22:25:00.000-08:00")) ) ); } @@ -589,23 +585,23 @@ public class QueryGranularityTest public void testDurationTruncate() throws Exception { { - final DateTime origin = new DateTime("2012-01-02T05:00:00.000-08:00"); + final DateTime origin = DateTimes.of("2012-01-02T05:00:00.000-08:00"); Granularity gran = new DurationGranularity( new Period("PT12H5M").toStandardDuration().getMillis(), origin ); assertSameDateTime( Lists.newArrayList( - new DateTime("2012-01-01T04:50:00.000-08:00"), - new DateTime("2012-01-02T05:00:00.000-08:00"), - new DateTime("2012-01-02T17:05:00.000-08:00"), - new DateTime("2012-02-03T22:25:00.000-08:00") + DateTimes.of("2012-01-01T04:50:00.000-08:00"), + DateTimes.of("2012-01-02T05:00:00.000-08:00"), + DateTimes.of("2012-01-02T17:05:00.000-08:00"), + DateTimes.of("2012-02-03T22:25:00.000-08:00") ), Lists.newArrayList( - gran.bucketStart(new DateTime("2012-01-01T05:00:04.123-08:00")), - gran.bucketStart(new DateTime("2012-01-02T07:00:04.123-08:00")), - gran.bucketStart(new DateTime("2012-01-03T00:20:04.123-08:00")), - gran.bucketStart(new DateTime("2012-02-03T22:25:00.000-08:00")) + gran.bucketStart(DateTimes.of("2012-01-01T05:00:04.123-08:00")), + gran.bucketStart(DateTimes.of("2012-01-02T07:00:04.123-08:00")), + gran.bucketStart(DateTimes.of("2012-01-03T00:20:04.123-08:00")), + gran.bucketStart(DateTimes.of("2012-02-03T22:25:00.000-08:00")) ) ); } @@ -614,15 +610,15 @@ public class QueryGranularityTest @Test public void testDurationToDateTime() throws Exception { - final DateTime origin = new DateTime("2012-01-02T05:00:00.000-08:00"); + final DateTime origin = DateTimes.of("2012-01-02T05:00:00.000-08:00"); Granularity gran = new DurationGranularity( new Period("PT12H5M").toStandardDuration().getMillis(), origin ); Assert.assertEquals( - new DateTime("2012-01-01T05:00:04.123-08:00"), - gran.toDateTime(new DateTime("2012-01-01T05:00:04.123-08:00").getMillis()) + DateTimes.of("2012-01-01T05:00:04.123-08:00"), + gran.toDateTime(DateTimes.of("2012-01-01T05:00:04.123-08:00").getMillis()) ); } @@ -630,22 +626,22 @@ public class QueryGranularityTest @Test public void testIterableAllSimple() throws Exception { - final DateTime baseTime = new DateTime("2011-01-01T00:00:00.000Z"); + final DateTime baseTime = 
DateTimes.of("2011-01-01T00:00:00.000Z"); assertSameInterval( Lists.newArrayList(baseTime), - Granularities.ALL.getIterable(new Interval(baseTime.getMillis(), baseTime.plus(Days.days(3)).getMillis())) + Granularities.ALL.getIterable(new Interval(baseTime, baseTime.plus(Days.days(3)))) ); } @Test public void testIterableAllComplex() throws Exception { - final DateTime baseTime = new DateTime("2011-01-01T09:38:02.992Z"); + final DateTime baseTime = DateTimes.of("2011-01-01T09:38:02.992Z"); assertSameInterval( Lists.newArrayList(baseTime), - Granularities.ALL.getIterable(new Interval(baseTime.getMillis(), baseTime.plus(Days.days(3)).getMillis())) + Granularities.ALL.getIterable(new Interval(baseTime, baseTime.plus(Days.days(3)))) ); } @@ -669,13 +665,13 @@ public class QueryGranularityTest gran = mapper.readValue(json, Granularity.class); Assert.assertEquals(new PeriodGranularity( new Period("P1D"), - new DateTime(0L), + DateTimes.EPOCH, DateTimeZone.forID("America/Los_Angeles") ), gran); PeriodGranularity expected = new PeriodGranularity( new Period("P1D"), - new DateTime("2012-01-01"), + DateTimes.of("2012-01-01"), DateTimeZone.forID("America/Los_Angeles") ); diff --git a/processing/src/test/java/io/druid/jackson/DefaultObjectMapperTest.java b/processing/src/test/java/io/druid/jackson/DefaultObjectMapperTest.java index d2f1edaaed6..2d750de3318 100644 --- a/processing/src/test/java/io/druid/jackson/DefaultObjectMapperTest.java +++ b/processing/src/test/java/io/druid/jackson/DefaultObjectMapperTest.java @@ -20,6 +20,7 @@ package io.druid.jackson; import com.fasterxml.jackson.databind.ObjectMapper; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.StringUtils; import org.joda.time.DateTime; import org.junit.Assert; @@ -34,7 +35,7 @@ public class DefaultObjectMapperTest @Test public void testDateTime() throws Exception { - final DateTime time = new DateTime(); + final DateTime time = DateTimes.nowUtc(); Assert.assertEquals(StringUtils.format("\"%s\"", time), mapper.writeValueAsString(time)); } diff --git a/processing/src/test/java/io/druid/query/MultiValuedDimensionTest.java b/processing/src/test/java/io/druid/query/MultiValuedDimensionTest.java index e17b20369f4..fb0b18a80c2 100644 --- a/processing/src/test/java/io/druid/query/MultiValuedDimensionTest.java +++ b/processing/src/test/java/io/druid/query/MultiValuedDimensionTest.java @@ -31,6 +31,7 @@ import io.druid.data.input.impl.CSVParseSpec; import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.StringInputRowParser; import io.druid.data.input.impl.TimestampSpec; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; @@ -61,7 +62,6 @@ import io.druid.segment.Segment; import io.druid.segment.TestHelper; import io.druid.segment.incremental.IncrementalIndex; import org.apache.commons.io.FileUtils; -import org.joda.time.DateTime; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; @@ -313,7 +313,7 @@ public class MultiValuedDimensionTest Sequence> result = runner.run(QueryPlus.wrap(query), context); List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( diff --git a/processing/src/test/java/io/druid/query/QueryContextsTest.java 
b/processing/src/test/java/io/druid/query/QueryContextsTest.java index 81f23374a21..32050ec9261 100644 --- a/processing/src/test/java/io/druid/query/QueryContextsTest.java +++ b/processing/src/test/java/io/druid/query/QueryContextsTest.java @@ -21,8 +21,8 @@ package io.druid.query; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; +import io.druid.java.util.common.Intervals; import io.druid.query.spec.MultipleIntervalSegmentSpec; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; @@ -36,7 +36,7 @@ public class QueryContextsTest { final Query query = new TestQuery( new TableDataSource("test"), - new MultipleIntervalSegmentSpec(ImmutableList.of(new Interval("0/100"))), + new MultipleIntervalSegmentSpec(ImmutableList.of(Intervals.of("0/100"))), false, new HashMap() ); @@ -48,7 +48,7 @@ public class QueryContextsTest { Query query = new TestQuery( new TableDataSource("test"), - new MultipleIntervalSegmentSpec(ImmutableList.of(new Interval("0/100"))), + new MultipleIntervalSegmentSpec(ImmutableList.of(Intervals.of("0/100"))), false, new HashMap() ); @@ -63,7 +63,7 @@ public class QueryContextsTest { Query query = new TestQuery( new TableDataSource("test"), - new MultipleIntervalSegmentSpec(ImmutableList.of(new Interval("0/100"))), + new MultipleIntervalSegmentSpec(ImmutableList.of(Intervals.of("0/100"))), false, ImmutableMap.of(QueryContexts.TIMEOUT_KEY, 1000) ); diff --git a/processing/src/test/java/io/druid/query/QueryRunnerTestHelper.java b/processing/src/test/java/io/druid/query/QueryRunnerTestHelper.java index 0196205bb4b..4cb45b848c6 100644 --- a/processing/src/test/java/io/druid/query/QueryRunnerTestHelper.java +++ b/processing/src/test/java/io/druid/query/QueryRunnerTestHelper.java @@ -27,6 +27,8 @@ import com.google.common.collect.Lists; import com.google.common.util.concurrent.MoreExecutors; import com.metamx.emitter.core.NoopEmitter; import com.metamx.emitter.service.ServiceEmitter; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.UOE; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.granularity.Granularity; @@ -138,7 +140,7 @@ public class QueryRunnerTestHelper "ntimestamps", Arrays.asList("__time"), "function aggregate(current, t) { if (t > " + - new DateTime("2011-04-01T12:00:00Z").getMillis() + + DateTimes.of("2011-04-01T12:00:00Z").getMillis() + ") { return current + 1; } else { return current; } }", JS_RESET_0, JS_COMBINE_A_PLUS_B, @@ -238,22 +240,22 @@ public class QueryRunnerTestHelper expectedFullOnIndexValuesDesc = list.toArray(new String[list.size()]); } - public static final DateTime earliest = new DateTime("2011-01-12"); - public static final DateTime last = new DateTime("2011-04-15"); + public static final DateTime earliest = DateTimes.of("2011-01-12"); + public static final DateTime last = DateTimes.of("2011-04-15"); - public static final DateTime skippedDay = new DateTime("2011-01-21T00:00:00.000Z"); + public static final DateTime skippedDay = DateTimes.of("2011-01-21T00:00:00.000Z"); public static final QuerySegmentSpec firstToThird = new MultipleIntervalSegmentSpec( - Arrays.asList(new Interval("2011-04-01T00:00:00.000Z/2011-04-03T00:00:00.000Z")) + Arrays.asList(Intervals.of("2011-04-01T00:00:00.000Z/2011-04-03T00:00:00.000Z")) ); public static final QuerySegmentSpec secondOnly = new MultipleIntervalSegmentSpec( - Arrays.asList(new Interval("2011-04-02T00:00:00.000Z/P1D")) + 
Arrays.asList(Intervals.of("2011-04-02T00:00:00.000Z/P1D")) ); public static final QuerySegmentSpec fullOnInterval = new MultipleIntervalSegmentSpec( - Arrays.asList(new Interval("1970-01-01T00:00:00.000Z/2020-01-01T00:00:00.000Z")) + Arrays.asList(Intervals.of("1970-01-01T00:00:00.000Z/2020-01-01T00:00:00.000Z")) ); public static final QuerySegmentSpec emptyInterval = new MultipleIntervalSegmentSpec( - Arrays.asList(new Interval("2020-04-02T00:00:00.000Z/P1D")) + Arrays.asList(Intervals.of("2020-04-02T00:00:00.000Z/P1D")) ); public static Iterable transformToConstructionFeeder(Iterable in) diff --git a/processing/src/test/java/io/druid/query/ResultGranularTimestampComparatorTest.java b/processing/src/test/java/io/druid/query/ResultGranularTimestampComparatorTest.java index c85ed44f1f9..d819dde055b 100644 --- a/processing/src/test/java/io/druid/query/ResultGranularTimestampComparatorTest.java +++ b/processing/src/test/java/io/druid/query/ResultGranularTimestampComparatorTest.java @@ -19,6 +19,7 @@ package io.druid.query; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.granularity.Granularity; import org.joda.time.DateTime; @@ -48,7 +49,7 @@ public class ResultGranularTimestampComparatorTest this.descending = descending; } - private final DateTime time = new DateTime("2011-11-11"); + private final DateTime time = DateTimes.of("2011-11-11"); @Test public void testCompareAll() diff --git a/processing/src/test/java/io/druid/query/RetryQueryRunnerTest.java b/processing/src/test/java/io/druid/query/RetryQueryRunnerTest.java index e046b60d664..b9c40aeea71 100644 --- a/processing/src/test/java/io/druid/query/RetryQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/RetryQueryRunnerTest.java @@ -23,6 +23,8 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.Lists; import com.google.common.collect.MapMaker; import com.google.common.collect.Maps; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; import io.druid.query.aggregation.LongSumAggregatorFactory; @@ -32,8 +34,6 @@ import io.druid.query.timeseries.TimeseriesQueryQueryToolChest; import io.druid.query.timeseries.TimeseriesResultValue; import io.druid.segment.SegmentMissingException; import io.druid.segment.TestHelper; -import org.joda.time.DateTime; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; @@ -98,12 +98,7 @@ public class RetryQueryRunnerTest public Sequence> run(QueryPlus queryPlus, Map context) { ((List) context.get(Result.MISSING_SEGMENTS_KEY)).add( - new SegmentDescriptor( - new Interval( - 178888, - 1999999 - ), "test", 1 - ) + new SegmentDescriptor(Intervals.utc(178888, 1999999), "test", 1) ); return Sequences.empty(); } @@ -158,12 +153,7 @@ public class RetryQueryRunnerTest { if ((int) context.get("count") == 0) { ((List) context.get(Result.MISSING_SEGMENTS_KEY)).add( - new SegmentDescriptor( - new Interval( - 178888, - 1999999 - ), "test", 1 - ) + new SegmentDescriptor(Intervals.utc(178888, 1999999), "test", 1) ); context.put("count", 1); return Sequences.empty(); @@ -171,7 +161,7 @@ public class RetryQueryRunnerTest return Sequences.simple( Arrays.asList( new Result<>( - new DateTime(), + DateTimes.nowUtc(), new TimeseriesResultValue( Maps.newHashMap() ) @@ -217,12 +207,7 @@ public class RetryQueryRunnerTest { if ((int) 
context.get("count") < 3) { ((List) context.get(Result.MISSING_SEGMENTS_KEY)).add( - new SegmentDescriptor( - new Interval( - 178888, - 1999999 - ), "test", 1 - ) + new SegmentDescriptor(Intervals.utc(178888, 1999999), "test", 1) ); context.put("count", (int) context.get("count") + 1); return Sequences.empty(); @@ -230,7 +215,7 @@ public class RetryQueryRunnerTest return Sequences.simple( Arrays.asList( new Result<>( - new DateTime(), + DateTimes.nowUtc(), new TimeseriesResultValue( Maps.newHashMap() ) @@ -274,12 +259,7 @@ public class RetryQueryRunnerTest ) { ((List) context.get(Result.MISSING_SEGMENTS_KEY)).add( - new SegmentDescriptor( - new Interval( - 178888, - 1999999 - ), "test", 1 - ) + new SegmentDescriptor(Intervals.utc(178888, 1999999), "test", 1) ); return Sequences.empty(); } @@ -321,26 +301,16 @@ public class RetryQueryRunnerTest if ((int) context.get("count") == 0) { // assume 2 missing segments at first run ((List) context.get(Result.MISSING_SEGMENTS_KEY)).add( - new SegmentDescriptor( - new Interval( - 178888, - 1999999 - ), "test", 1 - ) + new SegmentDescriptor(Intervals.utc(178888, 1999999), "test", 1) ); ((List) context.get(Result.MISSING_SEGMENTS_KEY)).add( - new SegmentDescriptor( - new Interval( - 178888, - 1999999 - ), "test", 2 - ) + new SegmentDescriptor(Intervals.utc(178888, 1999999), "test", 2) ); context.put("count", 1); return Sequences.simple( Arrays.asList( new Result<>( - new DateTime(), + DateTimes.nowUtc(), new TimeseriesResultValue( Maps.newHashMap() ) @@ -352,18 +322,13 @@ public class RetryQueryRunnerTest Assert.assertTrue("Should retry with 2 missing segments", ((MultipleSpecificSegmentSpec) ((BaseQuery) query).getQuerySegmentSpec()).getDescriptors().size() == 2); // assume only left 1 missing at first retry ((List) context.get(Result.MISSING_SEGMENTS_KEY)).add( - new SegmentDescriptor( - new Interval( - 178888, - 1999999 - ), "test", 2 - ) + new SegmentDescriptor(Intervals.utc(178888, 1999999), "test", 2) ); context.put("count", 2); return Sequences.simple( Arrays.asList( new Result<>( - new DateTime(), + DateTimes.nowUtc(), new TimeseriesResultValue( Maps.newHashMap() ) @@ -378,7 +343,7 @@ public class RetryQueryRunnerTest return Sequences.simple( Arrays.asList( new Result<>( - new DateTime(), + DateTimes.nowUtc(), new TimeseriesResultValue( Maps.newHashMap() ) diff --git a/processing/src/test/java/io/druid/query/SchemaEvolutionTest.java b/processing/src/test/java/io/druid/query/SchemaEvolutionTest.java index 71871864778..71b30f92789 100644 --- a/processing/src/test/java/io/druid/query/SchemaEvolutionTest.java +++ b/processing/src/test/java/io/druid/query/SchemaEvolutionTest.java @@ -31,6 +31,7 @@ import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.MapInputRowParser; import io.druid.data.input.impl.TimeAndDimsParseSpec; import io.druid.data.input.impl.TimestampSpec; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.ISE; import io.druid.java.util.common.guava.FunctionalIterable; import io.druid.java.util.common.guava.Sequence; @@ -50,7 +51,6 @@ import io.druid.segment.IndexBuilder; import io.druid.segment.QueryableIndex; import io.druid.segment.QueryableIndexSegment; import io.druid.segment.incremental.IncrementalIndexSchema; -import org.joda.time.DateTime; import org.junit.After; import org.junit.Assert; import org.junit.Before; @@ -73,7 +73,7 @@ public class SchemaEvolutionTest public static List> timeseriesResult(final Map map) { - return ImmutableList.of(new Result<>(new DateTime("2000"), 
new TimeseriesResultValue((Map) map))); + return ImmutableList.of(new Result<>(DateTimes.of("2000"), new TimeseriesResultValue((Map) map))); } public static List inputRowsWithDimensions(final List dimensions) diff --git a/processing/src/test/java/io/druid/query/TimewarpOperatorTest.java b/processing/src/test/java/io/druid/query/TimewarpOperatorTest.java index f0b6c490f0a..8b5675b646f 100644 --- a/processing/src/test/java/io/druid/query/TimewarpOperatorTest.java +++ b/processing/src/test/java/io/druid/query/TimewarpOperatorTest.java @@ -23,6 +23,7 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.google.common.collect.Maps; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; import io.druid.query.aggregation.AggregatorFactory; @@ -44,32 +45,26 @@ public class TimewarpOperatorTest public static final ImmutableMap CONTEXT = ImmutableMap.of(); TimewarpOperator> testOperator = new TimewarpOperator<>( - new Interval(new DateTime("2014-01-01"), new DateTime("2014-01-15")), + new Interval(DateTimes.of("2014-01-01"), DateTimes.of("2014-01-15")), new Period("P1W"), - new DateTime("2014-01-06") // align on Monday + DateTimes.of("2014-01-06") // align on Monday ); @Test public void testComputeOffset() throws Exception { { - final DateTime t = new DateTime("2014-01-23"); - final DateTime tOffset = new DateTime("2014-01-09"); + final DateTime t = DateTimes.of("2014-01-23"); + final DateTime tOffset = DateTimes.of("2014-01-09"); - Assert.assertEquals( - new DateTime(tOffset), - t.plus(testOperator.computeOffset(t.getMillis())) - ); + Assert.assertEquals(tOffset, t.plus(testOperator.computeOffset(t.getMillis()))); } { - final DateTime t = new DateTime("2014-08-02"); - final DateTime tOffset = new DateTime("2014-01-11"); + final DateTime t = DateTimes.of("2014-08-02"); + final DateTime tOffset = DateTimes.of("2014-01-11"); - Assert.assertEquals( - new DateTime(tOffset), - t.plus(testOperator.computeOffset(t.getMillis())) - ); + Assert.assertEquals(tOffset, t.plus(testOperator.computeOffset(t.getMillis()))); } } @@ -88,11 +83,11 @@ public class TimewarpOperatorTest return Sequences.simple( ImmutableList.of( new Result<>( - new DateTime(new DateTime("2014-01-09")), + DateTimes.of("2014-01-09"), new TimeseriesResultValue(ImmutableMap.of("metric", 2)) ), new Result<>( - new DateTime(new DateTime("2014-01-11")), + DateTimes.of("2014-01-11"), new TimeseriesResultValue(ImmutableMap.of("metric", 3)) ), new Result<>( @@ -103,7 +98,7 @@ public class TimewarpOperatorTest ); } }, - new DateTime("2014-08-02").getMillis() + DateTimes.of("2014-08-02").getMillis() ); final Query> query = @@ -116,15 +111,15 @@ public class TimewarpOperatorTest Assert.assertEquals( Lists.newArrayList( new Result<>( - new DateTime("2014-07-31"), + DateTimes.of("2014-07-31"), new TimeseriesResultValue(ImmutableMap.of("metric", 2)) ), new Result<>( - new DateTime("2014-08-02"), + DateTimes.of("2014-08-02"), new TimeseriesResultValue(ImmutableMap.of("metric", 3)) ), new Result<>( - new DateTime("2014-08-02"), + DateTimes.of("2014-08-02"), new TimeseriesResultValue(ImmutableMap.of("metric", 5)) ) ), @@ -136,9 +131,9 @@ public class TimewarpOperatorTest TimewarpOperator> timeBoundaryOperator = new TimewarpOperator<>( - new Interval(new DateTime("2014-01-01"), new DateTime("2014-01-15")), + new Interval(DateTimes.of("2014-01-01"), DateTimes.of("2014-01-15")), 
new Period("P1W"), - new DateTime("2014-01-06") // align on Monday + DateTimes.of("2014-01-06") // align on Monday ); QueryRunner> timeBoundaryRunner = timeBoundaryOperator.postProcess( @@ -153,11 +148,11 @@ public class TimewarpOperatorTest return Sequences.simple( ImmutableList.of( new Result<>( - new DateTime("2014-01-12"), + DateTimes.of("2014-01-12"), new TimeBoundaryResultValue( ImmutableMap.of( "maxTime", - new DateTime("2014-01-12") + DateTimes.of("2014-01-12") ) ) ) @@ -165,7 +160,7 @@ public class TimewarpOperatorTest ); } }, - new DateTime("2014-08-02").getMillis() + DateTimes.of("2014-08-02").getMillis() ); final Query> timeBoundaryQuery = @@ -176,8 +171,8 @@ public class TimewarpOperatorTest Assert.assertEquals( Lists.newArrayList( new Result<>( - new DateTime("2014-08-02"), - new TimeBoundaryResultValue(ImmutableMap.of("maxTime", new DateTime("2014-08-02"))) + DateTimes.of("2014-08-02"), + new TimeBoundaryResultValue(ImmutableMap.of("maxTime", DateTimes.of("2014-08-02"))) ) ), Sequences.toList( @@ -215,7 +210,7 @@ public class TimewarpOperatorTest ); } }, - new DateTime("2014-08-02").getMillis() + DateTimes.of("2014-08-02").getMillis() ); final Query> query = @@ -228,11 +223,11 @@ public class TimewarpOperatorTest Assert.assertEquals( Lists.newArrayList( new Result<>( - new DateTime("2014-08-02"), + DateTimes.of("2014-08-02"), new TimeseriesResultValue(ImmutableMap.of("metric", 2)) ), new Result<>( - new DateTime("2014-08-02"), + DateTimes.of("2014-08-02"), new TimeseriesResultValue(ImmutableMap.of("metric", 3)) ) ), diff --git a/processing/src/test/java/io/druid/query/datasourcemetadata/DataSourceMetadataQueryTest.java b/processing/src/test/java/io/druid/query/datasourcemetadata/DataSourceMetadataQueryTest.java index 12dbfad4cfe..4e23243fa80 100644 --- a/processing/src/test/java/io/druid/query/datasourcemetadata/DataSourceMetadataQueryTest.java +++ b/processing/src/test/java/io/druid/query/datasourcemetadata/DataSourceMetadataQueryTest.java @@ -27,6 +27,8 @@ import com.google.common.collect.Lists; import com.google.common.collect.MapMaker; import io.druid.data.input.MapBasedInputRow; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.guava.Sequences; import io.druid.query.DefaultGenericQueryMetricsFactory; import io.druid.query.Druids; @@ -124,7 +126,7 @@ public class DataSourceMetadataQueryTest ), new IncrementalIndexSegment(rtIndex, "test"), null ); - DateTime timestamp = new DateTime(System.currentTimeMillis()); + DateTime timestamp = DateTimes.nowUtc(); rtIndex.add( new MapBasedInputRow( timestamp.getMillis(), @@ -161,7 +163,7 @@ public class DataSourceMetadataQueryTest @Override public Interval getInterval() { - return new Interval("2012-01-01/P1D"); + return Intervals.of("2012-01-01/P1D"); } }, new LogicalSegment() @@ -169,7 +171,7 @@ public class DataSourceMetadataQueryTest @Override public Interval getInterval() { - return new Interval("2012-01-01T01/PT1H"); + return Intervals.of("2012-01-01T01/PT1H"); } }, new LogicalSegment() @@ -177,7 +179,7 @@ public class DataSourceMetadataQueryTest @Override public Interval getInterval() { - return new Interval("2013-01-01/P1D"); + return Intervals.of("2013-01-01/P1D"); } }, new LogicalSegment() @@ -185,7 +187,7 @@ public class DataSourceMetadataQueryTest @Override public Interval getInterval() { - return new Interval("2013-01-01T01/PT1H"); + return Intervals.of("2013-01-01T01/PT1H"); } }, new LogicalSegment() @@ 
-193,7 +195,7 @@ public class DataSourceMetadataQueryTest @Override public Interval getInterval() { - return new Interval("2013-01-01T02/PT1H"); + return Intervals.of("2013-01-01T02/PT1H"); } } ) @@ -207,7 +209,7 @@ public class DataSourceMetadataQueryTest @Override public Interval getInterval() { - return new Interval("2013-01-01/P1D"); + return Intervals.of("2013-01-01/P1D"); } }, new LogicalSegment() @@ -215,7 +217,7 @@ public class DataSourceMetadataQueryTest @Override public Interval getInterval() { - return new Interval("2013-01-01T02/PT1H"); + return Intervals.of("2013-01-01T02/PT1H"); } } ); @@ -228,7 +230,7 @@ public class DataSourceMetadataQueryTest @Test public void testResultSerialization() { - final DataSourceMetadataResultValue resultValue = new DataSourceMetadataResultValue(new DateTime("2000-01-01T00Z")); + final DataSourceMetadataResultValue resultValue = new DataSourceMetadataResultValue(DateTimes.of("2000-01-01T00Z")); final Map resultValueMap = new DefaultObjectMapper().convertValue( resultValue, new TypeReference>() @@ -252,7 +254,7 @@ public class DataSourceMetadataQueryTest resultValueMap, DataSourceMetadataResultValue.class ); - Assert.assertEquals(new DateTime("2000"), resultValue.getMaxIngestedEventTime()); + Assert.assertEquals(DateTimes.of("2000"), resultValue.getMaxIngestedEventTime()); } } diff --git a/processing/src/test/java/io/druid/query/expression/ExprMacroTest.java b/processing/src/test/java/io/druid/query/expression/ExprMacroTest.java index 5c09723c8a9..fb811984298 100644 --- a/processing/src/test/java/io/druid/query/expression/ExprMacroTest.java +++ b/processing/src/test/java/io/druid/query/expression/ExprMacroTest.java @@ -20,9 +20,9 @@ package io.druid.query.expression; import com.google.common.collect.ImmutableMap; +import io.druid.java.util.common.DateTimes; import io.druid.math.expr.Expr; import io.druid.math.expr.Parser; -import org.joda.time.DateTime; import org.junit.Assert; import org.junit.Rule; import org.junit.Test; @@ -32,7 +32,7 @@ public class ExprMacroTest { private static final Expr.ObjectBinding BINDINGS = Parser.withMap( ImmutableMap.builder() - .put("t", new DateTime("2000-02-03T04:05:06").getMillis()) + .put("t", DateTimes.of("2000-02-03T04:05:06").getMillis()) .put("tstr", "2000-02-03T04:05:06") .put("tstr_sql", "2000-02-03 04:05:06") .put("x", "foo") @@ -82,28 +82,28 @@ public class ExprMacroTest @Test public void testTimestampCeil() { - assertExpr("timestamp_ceil(t, 'P1M')", new DateTime("2000-03-01").getMillis()); - assertExpr("timestamp_ceil(t, 'P1D','','America/Los_Angeles')", new DateTime("2000-02-03T08").getMillis()); - assertExpr("timestamp_ceil(t, 'P1D','',CityOfAngels)", new DateTime("2000-02-03T08").getMillis()); - assertExpr("timestamp_ceil(t, 'P1D','1970-01-01T01','Etc/UTC')", new DateTime("2000-02-04T01").getMillis()); + assertExpr("timestamp_ceil(t, 'P1M')", DateTimes.of("2000-03-01").getMillis()); + assertExpr("timestamp_ceil(t, 'P1D','','America/Los_Angeles')", DateTimes.of("2000-02-03T08").getMillis()); + assertExpr("timestamp_ceil(t, 'P1D','',CityOfAngels)", DateTimes.of("2000-02-03T08").getMillis()); + assertExpr("timestamp_ceil(t, 'P1D','1970-01-01T01','Etc/UTC')", DateTimes.of("2000-02-04T01").getMillis()); } @Test public void testTimestampFloor() { - assertExpr("timestamp_floor(t, 'P1M')", new DateTime("2000-02-01").getMillis()); - assertExpr("timestamp_floor(t, 'P1D','','America/Los_Angeles')", new DateTime("2000-02-02T08").getMillis()); - assertExpr("timestamp_floor(t, 'P1D','',CityOfAngels)", new 
DateTime("2000-02-02T08").getMillis()); - assertExpr("timestamp_floor(t, 'P1D','1970-01-01T01','Etc/UTC')", new DateTime("2000-02-03T01").getMillis()); + assertExpr("timestamp_floor(t, 'P1M')", DateTimes.of("2000-02-01").getMillis()); + assertExpr("timestamp_floor(t, 'P1D','','America/Los_Angeles')", DateTimes.of("2000-02-02T08").getMillis()); + assertExpr("timestamp_floor(t, 'P1D','',CityOfAngels)", DateTimes.of("2000-02-02T08").getMillis()); + assertExpr("timestamp_floor(t, 'P1D','1970-01-01T01','Etc/UTC')", DateTimes.of("2000-02-03T01").getMillis()); } @Test public void testTimestampShift() { - assertExpr("timestamp_shift(t, 'P1D', 2)", new DateTime("2000-02-05T04:05:06").getMillis()); - assertExpr("timestamp_shift(t, 'P1D', 2, 'America/Los_Angeles')", new DateTime("2000-02-05T04:05:06").getMillis()); - assertExpr("timestamp_shift(t, 'P1D', 2, CityOfAngels)", new DateTime("2000-02-05T04:05:06").getMillis()); - assertExpr("timestamp_shift(t, 'P1D', 2, '-08:00')", new DateTime("2000-02-05T04:05:06").getMillis()); + assertExpr("timestamp_shift(t, 'P1D', 2)", DateTimes.of("2000-02-05T04:05:06").getMillis()); + assertExpr("timestamp_shift(t, 'P1D', 2, 'America/Los_Angeles')", DateTimes.of("2000-02-05T04:05:06").getMillis()); + assertExpr("timestamp_shift(t, 'P1D', 2, CityOfAngels)", DateTimes.of("2000-02-05T04:05:06").getMillis()); + assertExpr("timestamp_shift(t, 'P1D', 2, '-08:00')", DateTimes.of("2000-02-05T04:05:06").getMillis()); } @Test @@ -118,12 +118,12 @@ public class ExprMacroTest @Test public void testTimestampParse() { - assertExpr("timestamp_parse(tstr)", new DateTime("2000-02-03T04:05:06").getMillis()); + assertExpr("timestamp_parse(tstr)", DateTimes.of("2000-02-03T04:05:06").getMillis()); assertExpr("timestamp_parse(tstr_sql)", null); - assertExpr("timestamp_parse(tstr_sql,'yyyy-MM-dd HH:mm:ss')", new DateTime("2000-02-03T04:05:06").getMillis()); + assertExpr("timestamp_parse(tstr_sql,'yyyy-MM-dd HH:mm:ss')", DateTimes.of("2000-02-03T04:05:06").getMillis()); assertExpr( "timestamp_parse(tstr_sql,'yyyy-MM-dd HH:mm:ss','America/Los_Angeles')", - new DateTime("2000-02-03T04:05:06-08:00").getMillis() + DateTimes.of("2000-02-03T04:05:06-08:00").getMillis() ); } diff --git a/processing/src/test/java/io/druid/query/extraction/JavaScriptExtractionFnTest.java b/processing/src/test/java/io/druid/query/extraction/JavaScriptExtractionFnTest.java index b2bfad510c0..dc4494dadbd 100644 --- a/processing/src/test/java/io/druid/query/extraction/JavaScriptExtractionFnTest.java +++ b/processing/src/test/java/io/druid/query/extraction/JavaScriptExtractionFnTest.java @@ -24,8 +24,8 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.Iterators; import com.google.common.collect.Lists; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; import io.druid.js.JavaScriptConfig; -import org.joda.time.DateTime; import org.junit.Assert; import org.junit.Rule; import org.junit.Test; @@ -77,7 +77,7 @@ public class JavaScriptExtractionFnTest public void testTimeExample() throws Exception { String utcHour = "function(t) {\nreturn 'Second ' + Math.floor((t % 60000) / 1000);\n}"; - final long millis = new DateTime("2015-01-02T13:00:59.999Z").getMillis(); + final long millis = DateTimes.of("2015-01-02T13:00:59.999Z").getMillis(); Assert.assertEquals("Second 59", new JavaScriptExtractionFn(utcHour, false, JavaScriptConfig.getEnabledInstance()).apply(millis)); } diff --git 
a/processing/src/test/java/io/druid/query/extraction/TimeFormatExtractionFnTest.java b/processing/src/test/java/io/druid/query/extraction/TimeFormatExtractionFnTest.java index 2a608b6923c..83b416f486f 100644 --- a/processing/src/test/java/io/druid/query/extraction/TimeFormatExtractionFnTest.java +++ b/processing/src/test/java/io/druid/query/extraction/TimeFormatExtractionFnTest.java @@ -21,9 +21,9 @@ package io.druid.query.extraction; import com.fasterxml.jackson.databind.ObjectMapper; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.granularity.Granularity; -import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import org.junit.Assert; import org.junit.Test; @@ -34,12 +34,12 @@ public class TimeFormatExtractionFnTest { private static final long[] timestamps = { - new DateTime("2015-01-01T23:00:00Z").getMillis(), - new DateTime("2015-01-02T23:00:00Z").getMillis(), - new DateTime("2015-03-03T23:00:00Z").getMillis(), - new DateTime("2015-03-04T23:00:00Z").getMillis(), - new DateTime("2015-05-02T23:00:00Z").getMillis(), - new DateTime("2015-12-21T23:00:00Z").getMillis() + DateTimes.of("2015-01-01T23:00:00Z").getMillis(), + DateTimes.of("2015-01-02T23:00:00Z").getMillis(), + DateTimes.of("2015-03-03T23:00:00Z").getMillis(), + DateTimes.of("2015-03-04T23:00:00Z").getMillis(), + DateTimes.of("2015-05-02T23:00:00Z").getMillis(), + DateTimes.of("2015-12-21T23:00:00Z").getMillis() }; @Test diff --git a/processing/src/test/java/io/druid/query/filter/GetDimensionRangeSetTest.java b/processing/src/test/java/io/druid/query/filter/GetDimensionRangeSetTest.java index d335e770f27..30bfe9bcb28 100644 --- a/processing/src/test/java/io/druid/query/filter/GetDimensionRangeSetTest.java +++ b/processing/src/test/java/io/druid/query/filter/GetDimensionRangeSetTest.java @@ -23,12 +23,12 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableRangeSet; import com.google.common.collect.Range; import com.google.common.collect.RangeSet; +import io.druid.java.util.common.Intervals; import io.druid.js.JavaScriptConfig; import io.druid.query.extraction.IdentityExtractionFn; import io.druid.query.ordering.StringComparators; import io.druid.query.search.search.ContainsSearchQuerySpec; import io.druid.segment.column.Column; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; @@ -66,8 +66,8 @@ public class GetDimensionRangeSetTest private final DimFilter interval1 = new IntervalDimFilter( Column.TIME_COLUMN_NAME, Arrays.asList( - Interval.parse("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.004Z"), - Interval.parse("1975-01-01T00:00:00.001Z/1980-01-01T00:00:00.004Z") + Intervals.of("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.004Z"), + Intervals.of("1975-01-01T00:00:00.001Z/1980-01-01T00:00:00.004Z") ), null ); @@ -75,8 +75,8 @@ public class GetDimensionRangeSetTest private final DimFilter interval2 = new IntervalDimFilter( "dim1", Arrays.asList( - Interval.parse("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.004Z"), - Interval.parse("1975-01-01T00:00:00.001Z/1980-01-01T00:00:00.004Z") + Intervals.of("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.004Z"), + Intervals.of("1975-01-01T00:00:00.001Z/1980-01-01T00:00:00.004Z") ), null ); diff --git a/processing/src/test/java/io/druid/query/filter/IntervalDimFilterTest.java b/processing/src/test/java/io/druid/query/filter/IntervalDimFilterTest.java index 
ad9aec4cb30..06538e40daa 100644 --- a/processing/src/test/java/io/druid/query/filter/IntervalDimFilterTest.java +++ b/processing/src/test/java/io/druid/query/filter/IntervalDimFilterTest.java @@ -24,9 +24,9 @@ import com.google.inject.Injector; import com.google.inject.Key; import io.druid.guice.GuiceInjectors; import io.druid.guice.annotations.Json; +import io.druid.java.util.common.Intervals; import io.druid.query.extraction.RegexDimExtractionFn; import io.druid.segment.column.Column; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Before; import org.junit.Test; @@ -51,8 +51,8 @@ public class IntervalDimFilterTest DimFilter intervalFilter = new IntervalDimFilter( Column.TIME_COLUMN_NAME, Arrays.asList( - Interval.parse("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.004Z"), - Interval.parse("1975-01-01T00:00:00.001Z/1980-01-01T00:00:00.004Z") + Intervals.of("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.004Z"), + Intervals.of("1975-01-01T00:00:00.001Z/1980-01-01T00:00:00.004Z") ), null ); @@ -63,8 +63,8 @@ public class IntervalDimFilterTest intervalFilter = new IntervalDimFilter( Column.TIME_COLUMN_NAME, Arrays.asList( - Interval.parse("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.004Z"), - Interval.parse("1975-01-01T00:00:00.001Z/1980-01-01T00:00:00.004Z") + Intervals.of("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.004Z"), + Intervals.of("1975-01-01T00:00:00.001Z/1980-01-01T00:00:00.004Z") ), new RegexDimExtractionFn(".*", false, null) ); @@ -80,8 +80,8 @@ public class IntervalDimFilterTest DimFilter intervalFilter1 = new IntervalDimFilter( Column.TIME_COLUMN_NAME, Arrays.asList( - Interval.parse("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.004Z"), - Interval.parse("1975-01-01T00:00:00.001Z/1980-01-01T00:00:00.004Z") + Intervals.of("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.004Z"), + Intervals.of("1975-01-01T00:00:00.001Z/1980-01-01T00:00:00.004Z") ), null ); @@ -89,8 +89,8 @@ public class IntervalDimFilterTest DimFilter intervalFilter2 = new IntervalDimFilter( Column.TIME_COLUMN_NAME, Arrays.asList( - Interval.parse("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.004Z"), - Interval.parse("1976-01-01T00:00:00.001Z/1980-01-01T00:00:00.004Z") + Intervals.of("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.004Z"), + Intervals.of("1976-01-01T00:00:00.001Z/1980-01-01T00:00:00.004Z") ), null ); @@ -100,16 +100,16 @@ public class IntervalDimFilterTest DimFilter intervalFilter3 = new IntervalDimFilter( Column.TIME_COLUMN_NAME, Arrays.asList( - Interval.parse("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.004Z"), - Interval.parse("1975-01-01T00:00:00.001Z/1980-01-01T00:00:00.004Z") + Intervals.of("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.004Z"), + Intervals.of("1975-01-01T00:00:00.001Z/1980-01-01T00:00:00.004Z") ), regexFn ); DimFilter intervalFilter4 = new IntervalDimFilter( Column.TIME_COLUMN_NAME, Arrays.asList( - Interval.parse("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.004Z"), - Interval.parse("1976-01-01T00:00:00.001Z/1980-01-01T00:00:00.004Z") + Intervals.of("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.004Z"), + Intervals.of("1976-01-01T00:00:00.001Z/1980-01-01T00:00:00.004Z") ), regexFn ); @@ -124,8 +124,8 @@ public class IntervalDimFilterTest DimFilter intervalFilter1 = new IntervalDimFilter( Column.TIME_COLUMN_NAME, Arrays.asList( - Interval.parse("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.004Z"), - Interval.parse("1975-01-01T00:00:00.001Z/1980-01-01T00:00:00.004Z") + Intervals.of("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.004Z"), + 
Intervals.of("1975-01-01T00:00:00.001Z/1980-01-01T00:00:00.004Z") ), null ); @@ -133,8 +133,8 @@ public class IntervalDimFilterTest DimFilter intervalFilter2 = new IntervalDimFilter( Column.TIME_COLUMN_NAME, Arrays.asList( - Interval.parse("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.004Z"), - Interval.parse("1975-01-01T00:00:00.001Z/1980-01-01T00:00:00.004Z") + Intervals.of("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.004Z"), + Intervals.of("1975-01-01T00:00:00.001Z/1980-01-01T00:00:00.004Z") ), regexFn ); @@ -142,8 +142,8 @@ public class IntervalDimFilterTest DimFilter intervalFilter3 = new IntervalDimFilter( Column.TIME_COLUMN_NAME, Arrays.asList( - Interval.parse("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.004Z"), - Interval.parse("1977-01-01T00:00:00.001Z/1980-01-01T00:00:00.004Z") + Intervals.of("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.004Z"), + Intervals.of("1977-01-01T00:00:00.001Z/1980-01-01T00:00:00.004Z") ), null ); @@ -154,9 +154,9 @@ public class IntervalDimFilterTest DimFilter intervalFilter4 = new IntervalDimFilter( Column.TIME_COLUMN_NAME, Arrays.asList( - Interval.parse("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.004Z"), - Interval.parse("1975-01-01T00:00:00.001Z/1977-01-01T00:00:00.004Z"), - Interval.parse("1976-01-01T00:00:00.001Z/1980-01-01T00:00:00.004Z") + Intervals.of("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.004Z"), + Intervals.of("1975-01-01T00:00:00.001Z/1977-01-01T00:00:00.004Z"), + Intervals.of("1976-01-01T00:00:00.001Z/1980-01-01T00:00:00.004Z") ), null ); @@ -166,8 +166,8 @@ public class IntervalDimFilterTest DimFilter intervalFilter5 = new IntervalDimFilter( "__thyme", Arrays.asList( - Interval.parse("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.004Z"), - Interval.parse("1975-01-01T00:00:00.001Z/1980-01-01T00:00:00.004Z") + Intervals.of("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.004Z"), + Intervals.of("1975-01-01T00:00:00.001Z/1980-01-01T00:00:00.004Z") ), null ); @@ -182,8 +182,8 @@ public class IntervalDimFilterTest DimFilter intervalFilter1 = new IntervalDimFilter( Column.TIME_COLUMN_NAME, Arrays.asList( - Interval.parse("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.004Z"), - Interval.parse("1975-01-01T00:00:00.001Z/1980-01-01T00:00:00.004Z") + Intervals.of("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.004Z"), + Intervals.of("1975-01-01T00:00:00.001Z/1980-01-01T00:00:00.004Z") ), null ); @@ -191,8 +191,8 @@ public class IntervalDimFilterTest DimFilter intervalFilter2 = new IntervalDimFilter( Column.TIME_COLUMN_NAME, Arrays.asList( - Interval.parse("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.004Z"), - Interval.parse("1975-01-01T00:00:00.001Z/1980-01-01T00:00:00.004Z") + Intervals.of("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.004Z"), + Intervals.of("1975-01-01T00:00:00.001Z/1980-01-01T00:00:00.004Z") ), regexFn ); @@ -200,8 +200,8 @@ public class IntervalDimFilterTest DimFilter intervalFilter3 = new IntervalDimFilter( Column.TIME_COLUMN_NAME, Arrays.asList( - Interval.parse("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.004Z"), - Interval.parse("1977-01-01T00:00:00.001Z/1980-01-01T00:00:00.004Z") + Intervals.of("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.004Z"), + Intervals.of("1977-01-01T00:00:00.001Z/1980-01-01T00:00:00.004Z") ), null ); @@ -212,9 +212,9 @@ public class IntervalDimFilterTest DimFilter intervalFilter4 = new IntervalDimFilter( Column.TIME_COLUMN_NAME, Arrays.asList( - Interval.parse("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.004Z"), - Interval.parse("1975-01-01T00:00:00.001Z/1977-01-01T00:00:00.004Z"), - 
Interval.parse("1976-01-01T00:00:00.001Z/1980-01-01T00:00:00.004Z") + Intervals.of("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.004Z"), + Intervals.of("1975-01-01T00:00:00.001Z/1977-01-01T00:00:00.004Z"), + Intervals.of("1976-01-01T00:00:00.001Z/1980-01-01T00:00:00.004Z") ), null ); @@ -223,8 +223,8 @@ public class IntervalDimFilterTest DimFilter intervalFilter5 = new IntervalDimFilter( "__thyme", Arrays.asList( - Interval.parse("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.004Z"), - Interval.parse("1975-01-01T00:00:00.001Z/1980-01-01T00:00:00.004Z") + Intervals.of("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.004Z"), + Intervals.of("1975-01-01T00:00:00.001Z/1980-01-01T00:00:00.004Z") ), null ); diff --git a/processing/src/test/java/io/druid/query/groupby/DefaultGroupByQueryMetricsTest.java b/processing/src/test/java/io/druid/query/groupby/DefaultGroupByQueryMetricsTest.java index ca14d9dd5a2..951c97d09d4 100644 --- a/processing/src/test/java/io/druid/query/groupby/DefaultGroupByQueryMetricsTest.java +++ b/processing/src/test/java/io/druid/query/groupby/DefaultGroupByQueryMetricsTest.java @@ -22,6 +22,7 @@ package io.druid.query.groupby; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.metamx.emitter.service.ServiceEmitter; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.granularity.PeriodGranularity; import io.druid.query.CachingEmitter; import io.druid.query.DefaultQueryMetricsTest; @@ -99,7 +100,7 @@ public class DefaultGroupByQueryMetricsTest Assert.assertEquals("", actualEvent.get("service")); Assert.assertEquals(QueryRunnerTestHelper.dataSource, actualEvent.get(DruidMetrics.DATASOURCE)); Assert.assertEquals(query.getType(), actualEvent.get(DruidMetrics.TYPE)); - Interval expectedInterval = new Interval("2011-04-02/2011-04-04"); + Interval expectedInterval = Intervals.of("2011-04-02/2011-04-04"); Assert.assertEquals(Collections.singletonList(expectedInterval.toString()), actualEvent.get(DruidMetrics.INTERVAL)); Assert.assertEquals("true", actualEvent.get("hasFilters")); Assert.assertEquals(expectedInterval.toDuration().toString(), actualEvent.get("duration")); diff --git a/processing/src/test/java/io/druid/query/groupby/GroupByMultiSegmentTest.java b/processing/src/test/java/io/druid/query/groupby/GroupByMultiSegmentTest.java index 9611b0cff1e..1f3bcdf7946 100644 --- a/processing/src/test/java/io/druid/query/groupby/GroupByMultiSegmentTest.java +++ b/processing/src/test/java/io/druid/query/groupby/GroupByMultiSegmentTest.java @@ -40,6 +40,7 @@ import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.LongDimensionSchema; import io.druid.data.input.impl.StringDimensionSchema; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; @@ -76,7 +77,6 @@ import io.druid.segment.column.ColumnConfig; import io.druid.segment.incremental.IncrementalIndex; import io.druid.segment.incremental.IncrementalIndexSchema; import org.apache.commons.io.FileUtils; -import org.joda.time.Interval; import org.junit.After; import org.junit.Assert; import org.junit.Before; @@ -311,7 +311,9 @@ public class GroupByMultiSegmentTest ), (QueryToolChest) toolChest ); - QuerySegmentSpec intervalSpec = new MultipleIntervalSegmentSpec(Collections.singletonList(new Interval(0, 1000000))); + QuerySegmentSpec intervalSpec = new 
MultipleIntervalSegmentSpec( + Collections.singletonList(Intervals.utc(0, 1000000)) + ); GroupByQuery query = GroupByQuery .builder() diff --git a/processing/src/test/java/io/druid/query/groupby/GroupByQueryConfigTest.java b/processing/src/test/java/io/druid/query/groupby/GroupByQueryConfigTest.java index 8bc0140e215..eff05ca2033 100644 --- a/processing/src/test/java/io/druid/query/groupby/GroupByQueryConfigTest.java +++ b/processing/src/test/java/io/druid/query/groupby/GroupByQueryConfigTest.java @@ -21,9 +21,9 @@ package io.druid.query.groupby; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableMap; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.granularity.Granularities; import io.druid.segment.TestHelper; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; @@ -65,7 +65,7 @@ public class GroupByQueryConfigTest final GroupByQueryConfig config2 = config.withOverrides( GroupByQuery.builder() .setDataSource("test") - .setInterval(new Interval("2000/P1D")) + .setInterval(Intervals.of("2000/P1D")) .setGranularity(Granularities.ALL) .build() ); @@ -87,7 +87,7 @@ public class GroupByQueryConfigTest final GroupByQueryConfig config2 = config.withOverrides( GroupByQuery.builder() .setDataSource("test") - .setInterval(new Interval("2000/P1D")) + .setInterval(Intervals.of("2000/P1D")) .setGranularity(Granularities.ALL) .setContext( ImmutableMap.of( diff --git a/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTest.java b/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTest.java index ddef2a474c7..93268c1fb54 100644 --- a/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTest.java @@ -36,8 +36,10 @@ import io.druid.collections.DefaultBlockingPool; import io.druid.collections.NonBlockingPool; import io.druid.collections.StupidPool; import io.druid.data.input.Row; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.IAE; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.DurationGranularity; import io.druid.java.util.common.granularity.Granularities; @@ -127,7 +129,6 @@ import io.druid.segment.column.ValueType; import io.druid.segment.virtual.ExpressionVirtualColumn; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; -import org.joda.time.Interval; import org.joda.time.Period; import org.junit.Assert; import org.junit.Ignore; @@ -2492,10 +2493,10 @@ public class GroupByQueryRunnerTest { // simulate two daily segments final QueryPlus queryPlus1 = queryPlus.withQuerySegmentSpec( - new MultipleIntervalSegmentSpec(Lists.newArrayList(new Interval("2011-04-02/2011-04-03"))) + new MultipleIntervalSegmentSpec(Lists.newArrayList(Intervals.of("2011-04-02/2011-04-03"))) ); final QueryPlus queryPlus2 = queryPlus.withQuerySegmentSpec( - new MultipleIntervalSegmentSpec(Lists.newArrayList(new Interval("2011-04-03/2011-04-04"))) + new MultipleIntervalSegmentSpec(Lists.newArrayList(Intervals.of("2011-04-03/2011-04-04"))) ); return new MergeSequence( queryPlus.getQuery().getResultOrdering(), @@ -2803,10 +2804,10 @@ public class GroupByQueryRunnerTest { // simulate two daily segments final QueryPlus queryPlus1 = queryPlus.withQuerySegmentSpec( - new MultipleIntervalSegmentSpec(Lists.newArrayList(new 
Interval("2011-04-02/2011-04-03"))) + new MultipleIntervalSegmentSpec(Lists.newArrayList(Intervals.of("2011-04-02/2011-04-03"))) ); final QueryPlus queryPlus2 = queryPlus.withQuerySegmentSpec( - new MultipleIntervalSegmentSpec(Lists.newArrayList(new Interval("2011-04-03/2011-04-04"))) + new MultipleIntervalSegmentSpec(Lists.newArrayList(Intervals.of("2011-04-03/2011-04-04"))) ); return new MergeSequence( queryPlus.getQuery().getResultOrdering(), @@ -3610,10 +3611,10 @@ public class GroupByQueryRunnerTest { // simulate two daily segments final QueryPlus queryPlus1 = queryPlus.withQuerySegmentSpec( - new MultipleIntervalSegmentSpec(Lists.newArrayList(new Interval("2011-04-02/2011-04-03"))) + new MultipleIntervalSegmentSpec(Lists.newArrayList(Intervals.of("2011-04-02/2011-04-03"))) ); final QueryPlus queryPlus2 = queryPlus.withQuerySegmentSpec( - new MultipleIntervalSegmentSpec(Lists.newArrayList(new Interval("2011-04-03/2011-04-04"))) + new MultipleIntervalSegmentSpec(Lists.newArrayList(Intervals.of("2011-04-03/2011-04-04"))) ); return new MergeSequence( queryPlus.getQuery().getResultOrdering(), @@ -3828,7 +3829,7 @@ public class GroupByQueryRunnerTest new SelectorDimFilter("idx", "217", null) ) ), - new SelectorDimFilter("__time", String.valueOf(new DateTime("2011-04-01").getMillis()), null) + new SelectorDimFilter("__time", String.valueOf(DateTimes.of("2011-04-01").getMillis()), null) ) ) ); @@ -3943,10 +3944,10 @@ public class GroupByQueryRunnerTest { // simulate two daily segments final QueryPlus queryPlus1 = queryPlus.withQuerySegmentSpec( - new MultipleIntervalSegmentSpec(Lists.newArrayList(new Interval("2011-04-02/2011-04-03"))) + new MultipleIntervalSegmentSpec(Lists.newArrayList(Intervals.of("2011-04-02/2011-04-03"))) ); final QueryPlus queryPlus2 = queryPlus.withQuerySegmentSpec( - new MultipleIntervalSegmentSpec(Lists.newArrayList(new Interval("2011-04-03/2011-04-04"))) + new MultipleIntervalSegmentSpec(Lists.newArrayList(Intervals.of("2011-04-03/2011-04-04"))) ); return new MergeSequence( queryPlus.getQuery().getResultOrdering(), @@ -4052,10 +4053,10 @@ public class GroupByQueryRunnerTest { // simulate two daily segments final QueryPlus queryPlus1 = queryPlus.withQuerySegmentSpec( - new MultipleIntervalSegmentSpec(Lists.newArrayList(new Interval("2011-04-02/2011-04-03"))) + new MultipleIntervalSegmentSpec(Lists.newArrayList(Intervals.of("2011-04-02/2011-04-03"))) ); final QueryPlus queryPlus2 = queryPlus.withQuerySegmentSpec( - new MultipleIntervalSegmentSpec(Lists.newArrayList(new Interval("2011-04-03/2011-04-04"))) + new MultipleIntervalSegmentSpec(Lists.newArrayList(Intervals.of("2011-04-03/2011-04-04"))) ); return new MergeSequence( queryPlus.getQuery().getResultOrdering(), @@ -4278,8 +4279,8 @@ public class GroupByQueryRunnerTest .setQuerySegmentSpec( new MultipleIntervalSegmentSpec( ImmutableList.of( - new Interval("2011-04-01T00:00:00.000Z/2011-04-01T23:58:00.000Z"), - new Interval("2011-04-02T00:00:00.000Z/2011-04-03T00:00:00.000Z") + Intervals.of("2011-04-01T00:00:00.000Z/2011-04-01T23:58:00.000Z"), + Intervals.of("2011-04-02T00:00:00.000Z/2011-04-03T00:00:00.000Z") ) ) ) @@ -4351,8 +4352,8 @@ public class GroupByQueryRunnerTest .setQuerySegmentSpec( new MultipleIntervalSegmentSpec( ImmutableList.of( - new Interval("2011-04-01T00:00:00.000Z/2011-04-01T23:58:00.000Z"), - new Interval("2011-04-02T00:00:00.000Z/2011-04-03T00:00:00.000Z") + Intervals.of("2011-04-01T00:00:00.000Z/2011-04-01T23:58:00.000Z"), + Intervals.of("2011-04-02T00:00:00.000Z/2011-04-03T00:00:00.000Z") ) 
) ) @@ -4425,7 +4426,7 @@ public class GroupByQueryRunnerTest .setQuerySegmentSpec( new MultipleIntervalSegmentSpec( ImmutableList.of( - new Interval("2011-04-01T00:00:00.000Z/2011-04-03T00:00:00.000Z") + Intervals.of("2011-04-01T00:00:00.000Z/2011-04-03T00:00:00.000Z") ) ) ) @@ -6601,7 +6602,7 @@ public class GroupByQueryRunnerTest { int segmentCount = 32; Result singleSegmentResult = new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new BySegmentResultValueClass( Arrays.asList( GroupByQueryRunnerTestHelper.createExpectedRow( @@ -6613,7 +6614,7 @@ public class GroupByQueryRunnerTest "idx", 4420L ) - ), "testSegment", new Interval("2011-04-02T00:00:00.000Z/2011-04-04T00:00:00.000Z") + ), "testSegment", Intervals.of("2011-04-02T00:00:00.000Z/2011-04-04T00:00:00.000Z") ) ); List bySegmentResults = Lists.newArrayList(); @@ -6663,7 +6664,7 @@ public class GroupByQueryRunnerTest { int segmentCount = 32; Result singleSegmentResult = new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new BySegmentResultValueClass( Arrays.asList( GroupByQueryRunnerTestHelper.createExpectedRow( @@ -6675,7 +6676,7 @@ public class GroupByQueryRunnerTest "idx", 4420L ) - ), "testSegment", new Interval("2011-04-02T00:00:00.000Z/2011-04-04T00:00:00.000Z") + ), "testSegment", Intervals.of("2011-04-02T00:00:00.000Z/2011-04-04T00:00:00.000Z") ) ); List bySegmentResults = Lists.newArrayList(); @@ -6737,7 +6738,7 @@ public class GroupByQueryRunnerTest { int segmentCount = 32; Result singleSegmentResult = new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new BySegmentResultValueClass( Arrays.asList( GroupByQueryRunnerTestHelper.createExpectedRow( @@ -6749,7 +6750,7 @@ public class GroupByQueryRunnerTest "idx", 4420L ) - ), "testSegment", new Interval("2011-04-02T00:00:00.000Z/2011-04-04T00:00:00.000Z") + ), "testSegment", Intervals.of("2011-04-02T00:00:00.000Z/2011-04-04T00:00:00.000Z") ) ); List bySegmentResults = Lists.newArrayList(); @@ -7153,7 +7154,7 @@ public class GroupByQueryRunnerTest { int segmentCount = 32; Result singleSegmentResult = new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new BySegmentResultValueClass( Arrays.asList( GroupByQueryRunnerTestHelper.createExpectedRow( @@ -7165,7 +7166,7 @@ public class GroupByQueryRunnerTest "idx", 4420L ) - ), "testSegment", new Interval("2011-04-02T00:00:00.000Z/2011-04-04T00:00:00.000Z") + ), "testSegment", Intervals.of("2011-04-02T00:00:00.000Z/2011-04-04T00:00:00.000Z") ) ); List bySegmentResults = Lists.newArrayList(); @@ -8721,10 +8722,10 @@ public class GroupByQueryRunnerTest { // simulate two daily segments final QueryPlus queryPlus1 = queryPlus.withQuerySegmentSpec( - new MultipleIntervalSegmentSpec(Lists.newArrayList(new Interval("2011-04-02/2011-04-03"))) + new MultipleIntervalSegmentSpec(Lists.newArrayList(Intervals.of("2011-04-02/2011-04-03"))) ); final QueryPlus queryPlus2 = queryPlus.withQuerySegmentSpec( - new MultipleIntervalSegmentSpec(Lists.newArrayList(new Interval("2011-04-03/2011-04-04"))) + new MultipleIntervalSegmentSpec(Lists.newArrayList(Intervals.of("2011-04-03/2011-04-04"))) ); return factory.getToolchest().mergeResults( @@ -8805,10 +8806,10 @@ public class GroupByQueryRunnerTest { // simulate two daily segments final QueryPlus queryPlus1 = queryPlus.withQuerySegmentSpec( - new MultipleIntervalSegmentSpec(Lists.newArrayList(new 
Interval("2011-04-02/2011-04-03"))) + new MultipleIntervalSegmentSpec(Lists.newArrayList(Intervals.of("2011-04-02/2011-04-03"))) ); final QueryPlus queryPlus2 = queryPlus.withQuerySegmentSpec( - new MultipleIntervalSegmentSpec(Lists.newArrayList(new Interval("2011-04-03/2011-04-04"))) + new MultipleIntervalSegmentSpec(Lists.newArrayList(Intervals.of("2011-04-03/2011-04-04"))) ); return factory.getToolchest().mergeResults( @@ -8898,10 +8899,10 @@ public class GroupByQueryRunnerTest { // simulate two daily segments final QueryPlus queryPlus1 = queryPlus.withQuerySegmentSpec( - new MultipleIntervalSegmentSpec(Lists.newArrayList(new Interval("2011-04-02/2011-04-03"))) + new MultipleIntervalSegmentSpec(Lists.newArrayList(Intervals.of("2011-04-02/2011-04-03"))) ); final QueryPlus queryPlus2 = queryPlus.withQuerySegmentSpec( - new MultipleIntervalSegmentSpec(Lists.newArrayList(new Interval("2011-04-03/2011-04-04"))) + new MultipleIntervalSegmentSpec(Lists.newArrayList(Intervals.of("2011-04-03/2011-04-04"))) ); return factory.getToolchest().mergeResults( @@ -8995,10 +8996,10 @@ public class GroupByQueryRunnerTest { // simulate two daily segments final QueryPlus queryPlus1 = queryPlus.withQuerySegmentSpec( - new MultipleIntervalSegmentSpec(Lists.newArrayList(new Interval("2011-04-02/2011-04-03"))) + new MultipleIntervalSegmentSpec(Lists.newArrayList(Intervals.of("2011-04-02/2011-04-03"))) ); final QueryPlus queryPlus2 = queryPlus.withQuerySegmentSpec( - new MultipleIntervalSegmentSpec(Lists.newArrayList(new Interval("2011-04-03/2011-04-04"))) + new MultipleIntervalSegmentSpec(Lists.newArrayList(Intervals.of("2011-04-03/2011-04-04"))) ); return factory.getToolchest().mergeResults( @@ -9176,8 +9177,8 @@ public class GroupByQueryRunnerTest .setQuerySegmentSpec( new MultipleIntervalSegmentSpec( ImmutableList.of( - new Interval("2011-04-01T00:00:00.000Z/2011-04-01T23:58:00.000Z"), - new Interval("2011-04-02T00:00:00.000Z/2011-04-03T00:00:00.000Z") + Intervals.of("2011-04-01T00:00:00.000Z/2011-04-01T23:58:00.000Z"), + Intervals.of("2011-04-02T00:00:00.000Z/2011-04-03T00:00:00.000Z") ) ) ) diff --git a/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTestHelper.java b/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTestHelper.java index f8f98e1280a..111d09e5759 100644 --- a/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTestHelper.java +++ b/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTestHelper.java @@ -24,6 +24,7 @@ import com.google.common.collect.Lists; import com.google.common.collect.Maps; import io.druid.data.input.MapBasedRow; import io.druid.data.input.Row; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; import io.druid.query.FinalizeResultsQueryRunner; @@ -34,6 +35,7 @@ import io.druid.query.QueryRunnerFactory; import io.druid.query.QueryToolChest; import io.druid.segment.column.Column; import org.joda.time.DateTime; +import org.joda.time.chrono.ISOChronology; import java.util.Arrays; import java.util.List; @@ -58,7 +60,7 @@ public class GroupByQueryRunnerTestHelper public static Row createExpectedRow(final String timestamp, Object... vals) { - return createExpectedRow(new DateTime(timestamp), vals); + return createExpectedRow(DateTimes.of(timestamp), vals); } public static Row createExpectedRow(final DateTime timestamp, Object... 
vals) @@ -70,8 +72,7 @@ public class GroupByQueryRunnerTestHelper theVals.put(vals[i].toString(), vals[i + 1]); } - DateTime ts = new DateTime(timestamp); - return new MapBasedRow(ts, theVals); + return new MapBasedRow(timestamp, theVals); } public static List createExpectedRows(String[] columnNames, Object[]... values) @@ -88,7 +89,7 @@ public class GroupByQueryRunnerTestHelper theVals.put(columnNames[i], value[i]); } } - expected.add(new MapBasedRow(new DateTime(value[timeIndex]), theVals)); + expected.add(new MapBasedRow(new DateTime(value[timeIndex], ISOChronology.getInstanceUTC()), theVals)); } return expected; } diff --git a/processing/src/test/java/io/druid/query/groupby/GroupByTimeseriesQueryRunnerTest.java b/processing/src/test/java/io/druid/query/groupby/GroupByTimeseriesQueryRunnerTest.java index 21e956594c8..bb7edc21069 100644 --- a/processing/src/test/java/io/druid/query/groupby/GroupByTimeseriesQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/groupby/GroupByTimeseriesQueryRunnerTest.java @@ -25,6 +25,7 @@ import com.google.common.collect.Lists; import com.google.common.util.concurrent.MoreExecutors; import io.druid.data.input.MapBasedRow; import io.druid.data.input.Row; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Sequence; @@ -154,8 +155,8 @@ public class GroupByTimeseriesQueryRunnerTest extends TimeseriesQueryRunnerTest .descending(descending) .build(); - DateTime expectedEarliest = new DateTime("1970-01-01"); - DateTime expectedLast = new DateTime("2011-04-15"); + DateTime expectedEarliest = DateTimes.of("1970-01-01"); + DateTime expectedLast = DateTimes.of("2011-04-15"); Iterable> results = Sequences.toList( runner.run(QueryPlus.wrap(query), CONTEXT), diff --git a/processing/src/test/java/io/druid/query/groupby/orderby/DefaultLimitSpecTest.java b/processing/src/test/java/io/druid/query/groupby/orderby/DefaultLimitSpecTest.java index 03cdcd9cfae..2337d338379 100644 --- a/processing/src/test/java/io/druid/query/groupby/orderby/DefaultLimitSpecTest.java +++ b/processing/src/test/java/io/druid/query/groupby/orderby/DefaultLimitSpecTest.java @@ -26,6 +26,7 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.Maps; import io.druid.data.input.MapBasedRow; import io.druid.data.input.Row; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; import io.druid.query.aggregation.AggregatorFactory; @@ -39,7 +40,6 @@ import io.druid.query.dimension.DimensionSpec; import io.druid.query.expression.TestExprMacroTable; import io.druid.query.ordering.StringComparators; import io.druid.segment.TestHelper; -import org.joda.time.DateTime; import org.junit.Assert; import org.junit.Test; @@ -278,7 +278,6 @@ public class DefaultLimitSpecTest theVals.put(vals[i].toString(), vals[i + 1]); } - DateTime ts = new DateTime(timestamp); - return new MapBasedRow(ts, theVals); + return new MapBasedRow(DateTimes.of(timestamp), theVals); } } diff --git a/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChestTest.java b/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChestTest.java index 18ad069f0bc..76347a1fb87 100644 --- a/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChestTest.java +++ 
b/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChestTest.java @@ -25,6 +25,7 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Maps; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Intervals; import io.druid.query.CacheStrategy; import io.druid.query.TableDataSource; import io.druid.query.aggregation.AggregatorFactory; @@ -37,7 +38,6 @@ import io.druid.query.metadata.metadata.SegmentAnalysis; import io.druid.query.metadata.metadata.SegmentMetadataQuery; import io.druid.query.spec.QuerySegmentSpecs; import io.druid.segment.column.ValueType; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; @@ -69,9 +69,7 @@ public class SegmentMetadataQueryQueryToolChestTest SegmentAnalysis result = new SegmentAnalysis( "testSegment", - ImmutableList.of( - new Interval("2011-01-12T00:00:00.000Z/2011-04-15T00:00:00.001Z") - ), + ImmutableList.of(Intervals.of("2011-01-12T00:00:00.000Z/2011-04-15T00:00:00.001Z")), ImmutableMap.of( "placement", new ColumnAnalysis( diff --git a/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryTest.java b/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryTest.java index 899f65ad2b5..deaebcc734e 100644 --- a/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryTest.java +++ b/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryTest.java @@ -26,7 +26,7 @@ import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.util.concurrent.MoreExecutors; -import io.druid.common.utils.JodaUtils; +import io.druid.java.util.common.Intervals; import io.druid.data.input.impl.TimestampSpec; import io.druid.jackson.DefaultObjectMapper; import io.druid.java.util.common.granularity.Granularities; @@ -170,9 +170,7 @@ public class SegmentMetadataQueryTest expectedSegmentAnalysis1 = new SegmentAnalysis( id1, - ImmutableList.of( - new Interval("2011-01-12T00:00:00.000Z/2011-04-15T00:00:00.001Z") - ), + ImmutableList.of(Intervals.of("2011-01-12T00:00:00.000Z/2011-04-15T00:00:00.001Z")), ImmutableMap.of( "__time", new ColumnAnalysis( @@ -213,9 +211,7 @@ public class SegmentMetadataQueryTest ); expectedSegmentAnalysis2 = new SegmentAnalysis( id2, - ImmutableList.of( - new Interval("2011-01-12T00:00:00.000Z/2011-04-15T00:00:00.001Z") - ), + ImmutableList.of(Intervals.of("2011-01-12T00:00:00.000Z/2011-04-15T00:00:00.001Z")), ImmutableMap.of( "__time", new ColumnAnalysis( @@ -878,7 +874,10 @@ public class SegmentMetadataQueryTest Query query = MAPPER.readValue(queryStr, Query.class); Assert.assertTrue(query instanceof SegmentMetadataQuery); Assert.assertEquals("test_ds", Iterables.getOnlyElement(query.getDataSource().getNames())); - Assert.assertEquals(new Interval("2013-12-04T00:00:00.000Z/2013-12-05T00:00:00.000Z"), query.getIntervals().get(0)); + Assert.assertEquals( + Intervals.of("2013-12-04T00:00:00.000Z/2013-12-05T00:00:00.000Z"), + query.getIntervals().get(0) + ); Assert.assertEquals(expectedAnalysisTypes, ((SegmentMetadataQuery) query).getAnalysisTypes()); // test serialize and deserialize @@ -895,7 +894,7 @@ public class SegmentMetadataQueryTest Query query = MAPPER.readValue(queryStr, Query.class); Assert.assertTrue(query instanceof SegmentMetadataQuery); Assert.assertEquals("test_ds", Iterables.getOnlyElement(query.getDataSource().getNames())); - Assert.assertEquals(new 
Interval(JodaUtils.MIN_INSTANT, JodaUtils.MAX_INSTANT), query.getIntervals().get(0)); + Assert.assertEquals(Intervals.ETERNITY, query.getIntervals().get(0)); Assert.assertTrue(((SegmentMetadataQuery) query).isUsingDefaultInterval()); // test serialize and deserialize @@ -910,14 +909,9 @@ public class SegmentMetadataQueryTest .toInclude(new ListColumnIncluderator(Arrays.asList("placement"))) .merge(true) .build(); - - Interval expectedInterval = new Interval( - JodaUtils.MIN_INSTANT, JodaUtils.MAX_INSTANT - ); - /* No interval specified, should use default interval */ Assert.assertTrue(testQuery.isUsingDefaultInterval()); - Assert.assertEquals(testQuery.getIntervals().get(0), expectedInterval); + Assert.assertEquals(Intervals.ETERNITY, testQuery.getIntervals().get(0)); Assert.assertEquals(testQuery.getIntervals().size(), 1); List testSegments = Arrays.asList( @@ -926,7 +920,7 @@ public class SegmentMetadataQueryTest @Override public Interval getInterval() { - return new Interval("2012-01-01/P1D"); + return Intervals.of("2012-01-01/P1D"); } }, new LogicalSegment() @@ -934,7 +928,7 @@ public class SegmentMetadataQueryTest @Override public Interval getInterval() { - return new Interval("2012-01-01T01/PT1H"); + return Intervals.of("2012-01-01T01/PT1H"); } }, new LogicalSegment() @@ -942,7 +936,7 @@ public class SegmentMetadataQueryTest @Override public Interval getInterval() { - return new Interval("2013-01-05/P1D"); + return Intervals.of("2013-01-05/P1D"); } }, new LogicalSegment() @@ -950,7 +944,7 @@ public class SegmentMetadataQueryTest @Override public Interval getInterval() { - return new Interval("2013-05-20/P1D"); + return Intervals.of("2013-05-20/P1D"); } }, new LogicalSegment() @@ -958,7 +952,7 @@ public class SegmentMetadataQueryTest @Override public Interval getInterval() { - return new Interval("2014-01-05/P1D"); + return Intervals.of("2014-01-05/P1D"); } }, new LogicalSegment() @@ -966,7 +960,7 @@ public class SegmentMetadataQueryTest @Override public Interval getInterval() { - return new Interval("2014-02-05/P1D"); + return Intervals.of("2014-02-05/P1D"); } }, new LogicalSegment() @@ -974,7 +968,7 @@ public class SegmentMetadataQueryTest @Override public Interval getInterval() { - return new Interval("2015-01-19T01/PT1H"); + return Intervals.of("2015-01-19T01/PT1H"); } }, new LogicalSegment() @@ -982,7 +976,7 @@ public class SegmentMetadataQueryTest @Override public Interval getInterval() { - return new Interval("2015-01-20T02/PT1H"); + return Intervals.of("2015-01-20T02/PT1H"); } } ); @@ -1001,7 +995,7 @@ public class SegmentMetadataQueryTest @Override public Interval getInterval() { - return new Interval("2015-01-19T01/PT1H"); + return Intervals.of("2015-01-19T01/PT1H"); } }, new LogicalSegment() @@ -1009,7 +1003,7 @@ public class SegmentMetadataQueryTest @Override public Interval getInterval() { - return new Interval("2015-01-20T02/PT1H"); + return Intervals.of("2015-01-20T02/PT1H"); } } ); @@ -1034,7 +1028,7 @@ public class SegmentMetadataQueryTest @Override public Interval getInterval() { - return new Interval("2013-05-20/P1D"); + return Intervals.of("2013-05-20/P1D"); } }, new LogicalSegment() @@ -1042,7 +1036,7 @@ public class SegmentMetadataQueryTest @Override public Interval getInterval() { - return new Interval("2014-01-05/P1D"); + return Intervals.of("2014-01-05/P1D"); } }, new LogicalSegment() @@ -1050,7 +1044,7 @@ public class SegmentMetadataQueryTest @Override public Interval getInterval() { - return new Interval("2014-02-05/P1D"); + return 
Intervals.of("2014-02-05/P1D"); } }, new LogicalSegment() @@ -1058,7 +1052,7 @@ public class SegmentMetadataQueryTest @Override public Interval getInterval() { - return new Interval("2015-01-19T01/PT1H"); + return Intervals.of("2015-01-19T01/PT1H"); } }, new LogicalSegment() @@ -1066,7 +1060,7 @@ public class SegmentMetadataQueryTest @Override public Interval getInterval() { - return new Interval("2015-01-20T02/PT1H"); + return Intervals.of("2015-01-20T02/PT1H"); } } ); diff --git a/processing/src/test/java/io/druid/query/metadata/SegmentMetadataUnionQueryTest.java b/processing/src/test/java/io/druid/query/metadata/SegmentMetadataUnionQueryTest.java index d2811e0bbd3..e6106c44fdb 100644 --- a/processing/src/test/java/io/druid/query/metadata/SegmentMetadataUnionQueryTest.java +++ b/processing/src/test/java/io/druid/query/metadata/SegmentMetadataUnionQueryTest.java @@ -23,6 +23,7 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.google.common.collect.Maps; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.guava.Sequences; import io.druid.query.Druids; import io.druid.query.QueryPlus; @@ -38,7 +39,6 @@ import io.druid.segment.QueryableIndexSegment; import io.druid.segment.TestHelper; import io.druid.segment.TestIndex; import io.druid.segment.column.ValueType; -import org.joda.time.Interval; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; @@ -98,7 +98,7 @@ public class SegmentMetadataUnionQueryTest { SegmentAnalysis expected = new SegmentAnalysis( QueryRunnerTestHelper.segmentId, - Lists.newArrayList(new Interval("2011-01-12T00:00:00.000Z/2011-04-15T00:00:00.001Z")), + Lists.newArrayList(Intervals.of("2011-01-12T00:00:00.000Z/2011-04-15T00:00:00.001Z")), ImmutableMap.of( "placement", new ColumnAnalysis( diff --git a/processing/src/test/java/io/druid/query/search/SearchBinaryFnTest.java b/processing/src/test/java/io/druid/query/search/SearchBinaryFnTest.java index 4c7d112b314..5ef096773bd 100644 --- a/processing/src/test/java/io/druid/query/search/SearchBinaryFnTest.java +++ b/processing/src/test/java/io/druid/query/search/SearchBinaryFnTest.java @@ -20,6 +20,7 @@ package io.druid.query.search; import com.google.common.collect.ImmutableList; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.granularity.Granularities; import io.druid.query.Result; import io.druid.query.ordering.StringComparators; @@ -39,7 +40,7 @@ import java.util.List; */ public class SearchBinaryFnTest { - private final DateTime currTime = new DateTime(); + private final DateTime currTime = DateTimes.nowUtc(); private void assertSearchMergeResult(SearchResultValue o1, SearchResultValue o2) { diff --git a/processing/src/test/java/io/druid/query/search/SearchQueryQueryToolChestTest.java b/processing/src/test/java/io/druid/query/search/SearchQueryQueryToolChestTest.java index 4db4c1a54dd..988eeb516b6 100644 --- a/processing/src/test/java/io/druid/query/search/SearchQueryQueryToolChestTest.java +++ b/processing/src/test/java/io/druid/query/search/SearchQueryQueryToolChestTest.java @@ -22,6 +22,8 @@ package io.druid.query.search; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.granularity.Granularities; import 
io.druid.query.CacheStrategy; import io.druid.query.Druids; @@ -31,8 +33,6 @@ import io.druid.query.search.search.FragmentSearchQuerySpec; import io.druid.query.search.search.SearchHit; import io.druid.query.search.search.SearchQuery; import io.druid.query.spec.MultipleIntervalSegmentSpec; -import org.joda.time.DateTime; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; @@ -49,13 +49,7 @@ public class SearchQueryQueryToolChestTest null, Granularities.ALL, 1, - new MultipleIntervalSegmentSpec( - ImmutableList.of( - new Interval( - "2015-01-01/2015-01-02" - ) - ) - ), + new MultipleIntervalSegmentSpec(ImmutableList.of(Intervals.of("2015-01-01/2015-01-02"))), ImmutableList.of(Druids.DIMENSION_IDENTITY.apply("dim1")), new FragmentSearchQuerySpec(ImmutableList.of("a", "b")), null, @@ -64,11 +58,8 @@ public class SearchQueryQueryToolChestTest ); final Result result = new Result<>( - new DateTime(123L), new SearchResultValue( - ImmutableList.of( - new SearchHit("dim1", "a") - ) - ) + DateTimes.utc(123L), + new SearchResultValue(ImmutableList.of(new SearchHit("dim1", "a"))) ); Object preparedValue = strategy.prepareForCache().apply( diff --git a/processing/src/test/java/io/druid/query/search/SearchQueryRunnerTest.java b/processing/src/test/java/io/druid/query/search/SearchQueryRunnerTest.java index 1cd5b15374b..95691ceaaa7 100644 --- a/processing/src/test/java/io/druid/query/search/SearchQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/search/SearchQueryRunnerTest.java @@ -23,6 +23,8 @@ import com.google.common.base.Suppliers; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import io.druid.data.input.MapBasedInputRow; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; import io.druid.java.util.common.logger.Logger; @@ -60,8 +62,6 @@ import io.druid.segment.column.Column; import io.druid.segment.column.ValueType; import io.druid.segment.incremental.IncrementalIndex; import io.druid.segment.incremental.IncrementalIndexSchema; -import org.joda.time.DateTime; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; @@ -171,10 +171,10 @@ public class SearchQueryRunnerTest ) { final QueryPlus> queryPlus1 = queryPlus.withQuerySegmentSpec( - new MultipleIntervalSegmentSpec(Lists.newArrayList(new Interval("2011-01-12/2011-02-28"))) + new MultipleIntervalSegmentSpec(Lists.newArrayList(Intervals.of("2011-01-12/2011-02-28"))) ); final QueryPlus> queryPlus2 = queryPlus.withQuerySegmentSpec( - new MultipleIntervalSegmentSpec(Lists.newArrayList(new Interval("2011-03-01/2011-04-15"))) + new MultipleIntervalSegmentSpec(Lists.newArrayList(Intervals.of("2011-03-01/2011-04-15"))) ); return Sequences.concat(runner.run(queryPlus1, responseContext), runner.run(queryPlus2, responseContext)); } @@ -746,7 +746,7 @@ public class SearchQueryRunnerTest IncrementalIndex index = new IncrementalIndex.Builder() .setIndexSchema( new IncrementalIndexSchema.Builder() - .withMinTimestamp(new DateTime("2011-01-12T00:00:00.000Z").getMillis()) + .withMinTimestamp(DateTimes.of("2011-01-12T00:00:00.000Z").getMillis()) .build() ) .setMaxRowCount(10) @@ -820,7 +820,7 @@ public class SearchQueryRunnerTest ); List copy = Lists.newLinkedList(expectedResults); for (Result result : results) { - Assert.assertEquals(new DateTime("2011-01-12T00:00:00.000Z"), 
result.getTimestamp()); + Assert.assertEquals(DateTimes.of("2011-01-12T00:00:00.000Z"), result.getTimestamp()); Assert.assertTrue(result.getValue() instanceof Iterable); Iterable resultValues = result.getValue(); diff --git a/processing/src/test/java/io/druid/query/search/SearchQueryRunnerWithCaseTest.java b/processing/src/test/java/io/druid/query/search/SearchQueryRunnerWithCaseTest.java index 231d9103c27..a5ea8127f11 100644 --- a/processing/src/test/java/io/druid/query/search/SearchQueryRunnerWithCaseTest.java +++ b/processing/src/test/java/io/druid/query/search/SearchQueryRunnerWithCaseTest.java @@ -24,6 +24,7 @@ import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Sets; import com.google.common.io.CharSource; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.guava.Sequences; import io.druid.query.Druids; @@ -41,7 +42,6 @@ import io.druid.segment.QueryableIndex; import io.druid.segment.QueryableIndexSegment; import io.druid.segment.TestIndex; import io.druid.segment.incremental.IncrementalIndex; -import org.joda.time.DateTime; import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; @@ -249,7 +249,7 @@ public class SearchQueryRunnerWithCaseTest ); for (Result result : results) { - Assert.assertEquals(new DateTime("2011-01-12T00:00:00.000Z"), result.getTimestamp()); + Assert.assertEquals(DateTimes.of("2011-01-12T00:00:00.000Z"), result.getTimestamp()); Assert.assertNotNull(result.getValue()); Iterable resultValues = result.getValue(); diff --git a/processing/src/test/java/io/druid/query/select/MultiSegmentSelectQueryTest.java b/processing/src/test/java/io/druid/query/select/MultiSegmentSelectQueryTest.java index 9930097e2d1..f924626f23c 100644 --- a/processing/src/test/java/io/druid/query/select/MultiSegmentSelectQueryTest.java +++ b/processing/src/test/java/io/druid/query/select/MultiSegmentSelectQueryTest.java @@ -26,6 +26,8 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.google.common.io.CharSource; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Sequences; import io.druid.query.Druids; @@ -51,7 +53,6 @@ import io.druid.timeline.partition.NoneShardSpec; import io.druid.timeline.partition.SingleElementPartitionChunk; import org.apache.commons.io.IOUtils; import org.apache.commons.lang.StringUtils; -import org.joda.time.DateTime; import org.joda.time.Interval; import org.junit.AfterClass; import org.junit.Assert; @@ -153,7 +154,7 @@ public class MultiSegmentSelectQueryTest timeline.add(index2.getInterval(), "v2", new SingleElementPartitionChunk(segment_override)); segmentIdentifiers = Lists.newArrayList(); - for (TimelineObjectHolder holder : timeline.lookup(new Interval("2011-01-12/2011-01-14"))) { + for (TimelineObjectHolder holder : timeline.lookup(Intervals.of("2011-01-12/2011-01-14"))) { segmentIdentifiers.add(makeIdentifier(holder.getInterval(), holder.getVersion())); } @@ -184,7 +185,7 @@ public class MultiSegmentSelectQueryTest private static IncrementalIndex newIndex(String minTimeStamp, int maxRowCount) { final IncrementalIndexSchema schema = new IncrementalIndexSchema.Builder() - .withMinTimestamp(new DateTime(minTimeStamp).getMillis()) + 
.withMinTimestamp(DateTimes.of(minTimeStamp).getMillis()) .withQueryGranularity(Granularities.HOUR) .withMetrics(TestIndex.METRIC_AGGS) .build(); diff --git a/processing/src/test/java/io/druid/query/select/SelectBinaryFnTest.java b/processing/src/test/java/io/druid/query/select/SelectBinaryFnTest.java index aa7f5eafe29..bc8f0ef0c87 100644 --- a/processing/src/test/java/io/druid/query/select/SelectBinaryFnTest.java +++ b/processing/src/test/java/io/druid/query/select/SelectBinaryFnTest.java @@ -23,10 +23,10 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Sets; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.ISE; import io.druid.java.util.common.granularity.Granularities; import io.druid.query.Result; -import org.joda.time.DateTime; import org.junit.Assert; import org.junit.Test; @@ -50,7 +50,7 @@ public class SelectBinaryFnTest SelectBinaryFn binaryFn = new SelectBinaryFn(Granularities.ALL, new PagingSpec(null, 5), false); Result res1 = new Result<>( - new DateTime("2013-01-01"), + DateTimes.of("2013-01-01"), new SelectResultValue( ImmutableMap.of(), Sets.newHashSet("first", "fourth"), @@ -61,7 +61,7 @@ public class SelectBinaryFnTest 0, ImmutableMap.of( EventHolder.timestampKey, - new DateTime("2013-01-01T00"), + DateTimes.of("2013-01-01T00"), "dim", "first" ) @@ -71,7 +71,7 @@ public class SelectBinaryFnTest 1, ImmutableMap.of( EventHolder.timestampKey, - new DateTime("2013-01-01T03"), + DateTimes.of("2013-01-01T03"), "dim", "fourth" ) @@ -81,7 +81,7 @@ public class SelectBinaryFnTest 2, ImmutableMap.of( EventHolder.timestampKey, - new DateTime("2013-01-01T05"), + DateTimes.of("2013-01-01T05"), "dim", "sixth" ) @@ -92,7 +92,7 @@ public class SelectBinaryFnTest Result res2 = new Result<>( - new DateTime("2013-01-01"), + DateTimes.of("2013-01-01"), new SelectResultValue( ImmutableMap.of(), Sets.newHashSet("second", "third"), @@ -103,7 +103,7 @@ public class SelectBinaryFnTest 0, ImmutableMap.of( EventHolder.timestampKey, - new DateTime("2013-01-01T00"), + DateTimes.of("2013-01-01T00"), "dim", "second" ) @@ -113,7 +113,7 @@ public class SelectBinaryFnTest 1, ImmutableMap.of( EventHolder.timestampKey, - new DateTime("2013-01-01T02"), + DateTimes.of("2013-01-01T02"), "dim", "third" ) @@ -123,7 +123,7 @@ public class SelectBinaryFnTest 2, ImmutableMap.of( EventHolder.timestampKey, - new DateTime("2013-01-01T04"), + DateTimes.of("2013-01-01T04"), "dim", "fifth" ) @@ -159,7 +159,7 @@ public class SelectBinaryFnTest 0, ImmutableMap.of( EventHolder.timestampKey, - new DateTime("2013-01-01T00"), "dim", "first" + DateTimes.of("2013-01-01T00"), "dim", "first" ) ), new EventHolder( @@ -167,7 +167,7 @@ public class SelectBinaryFnTest 0, ImmutableMap.of( EventHolder.timestampKey, - new DateTime("2013-01-01T00"), + DateTimes.of("2013-01-01T00"), "dim", "second" ) @@ -177,7 +177,7 @@ public class SelectBinaryFnTest 1, ImmutableMap.of( EventHolder.timestampKey, - new DateTime("2013-01-01T02"), + DateTimes.of("2013-01-01T02"), "dim", "third" ) @@ -187,7 +187,7 @@ public class SelectBinaryFnTest 1, ImmutableMap.of( EventHolder.timestampKey, - new DateTime("2013-01-01T03"), + DateTimes.of("2013-01-01T03"), "dim", "fourth" ) @@ -197,7 +197,7 @@ public class SelectBinaryFnTest 2, ImmutableMap.of( EventHolder.timestampKey, - new DateTime("2013-01-01T04"), + DateTimes.of("2013-01-01T04"), "dim", "fifth" ) @@ -216,7 +216,7 @@ public class SelectBinaryFnTest SelectBinaryFn 
binaryFn = new SelectBinaryFn(Granularities.ALL, new PagingSpec(null, 5), false); Result res1 = new Result<>( - new DateTime("2013-01-01"), + DateTimes.of("2013-01-01"), new SelectResultValue( ImmutableMap.of(), Sets.newHashSet("first", "second", "fourth"), @@ -227,14 +227,14 @@ public class SelectBinaryFnTest 0, ImmutableMap.of( EventHolder.timestampKey, - new DateTime("2013-01-01T00"), "dim", "first" + DateTimes.of("2013-01-01T00"), "dim", "first" ) )) ) ); Result res2 = new Result<>( - new DateTime("2013-01-01"), + DateTimes.of("2013-01-01"), new SelectResultValue( ImmutableMap.of(), Sets.newHashSet("third", "second", "fifth"), @@ -245,7 +245,7 @@ public class SelectBinaryFnTest 0, ImmutableMap.of( EventHolder.timestampKey, - new DateTime("2013-01-01T00"), + DateTimes.of("2013-01-01T00"), "dim", "second" ) diff --git a/processing/src/test/java/io/druid/query/select/SelectQueryRunnerTest.java b/processing/src/test/java/io/druid/query/select/SelectQueryRunnerTest.java index 0fdeb9e9d65..3b598d4da2b 100644 --- a/processing/src/test/java/io/druid/query/select/SelectQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/select/SelectQueryRunnerTest.java @@ -29,7 +29,9 @@ import com.google.common.collect.Maps; import com.google.common.collect.ObjectArrays; import com.google.common.collect.Sets; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.guava.Sequences; import io.druid.js.JavaScriptConfig; import io.druid.query.Druids; @@ -57,7 +59,7 @@ import io.druid.segment.column.Column; import io.druid.segment.column.ValueType; import io.druid.segment.virtual.ExpressionVirtualColumn; import org.joda.time.DateTime; -import org.joda.time.Interval; +import org.joda.time.chrono.ISOChronology; import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; @@ -108,9 +110,7 @@ public class SelectQueryRunnerTest "2011-01-13T00:00:00.000Z\tupfront\tpremium\t1600\t16000.0\t160000\tpreferred\tppreferred\t1564.617729\tvalue" }; - public static final QuerySegmentSpec I_0112_0114 = new LegacySegmentSpec( - new Interval("2011-01-12/2011-01-14") - ); + public static final QuerySegmentSpec I_0112_0114 = new LegacySegmentSpec(Intervals.of("2011-01-12/2011-01-14")); public static final String[] V_0112_0114 = ObjectArrays.concat(V_0112, V_0113, String.class); private static final boolean DEFAULT_FROM_NEXT = true; @@ -282,7 +282,7 @@ public class SelectQueryRunnerTest List> expectedResultsAsc = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new SelectResultValue( ImmutableMap.of(QueryRunnerTestHelper.segmentId, 2), Sets.newHashSet("mar", "qual", "place"), @@ -292,7 +292,7 @@ public class SelectQueryRunnerTest QueryRunnerTestHelper.segmentId, 0, new ImmutableMap.Builder() - .put(EventHolder.timestampKey, new DateTime("2011-01-12T00:00:00.000Z")) + .put(EventHolder.timestampKey, DateTimes.of("2011-01-12T00:00:00.000Z")) .put("mar", "spot") .put("qual", "automotive0") .put("place", "preferred") @@ -303,7 +303,7 @@ public class SelectQueryRunnerTest QueryRunnerTestHelper.segmentId, 1, new ImmutableMap.Builder() - .put(EventHolder.timestampKey, new DateTime("2011-01-12T00:00:00.000Z")) + .put(EventHolder.timestampKey, DateTimes.of("2011-01-12T00:00:00.000Z")) .put("mar", "spot") .put("qual", "business0") .put("place", "preferred") @@ -314,7 +314,7 @@ public class 
SelectQueryRunnerTest QueryRunnerTestHelper.segmentId, 2, new ImmutableMap.Builder() - .put(EventHolder.timestampKey, new DateTime("2011-01-12T00:00:00.000Z")) + .put(EventHolder.timestampKey, DateTimes.of("2011-01-12T00:00:00.000Z")) .put("mar", "spot") .put("qual", "entertainment0") .put("place", "preferred") @@ -328,7 +328,7 @@ public class SelectQueryRunnerTest List> expectedResultsDsc = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new SelectResultValue( ImmutableMap.of(QueryRunnerTestHelper.segmentId, -3), Sets.newHashSet("mar", "qual", "place"), @@ -338,7 +338,7 @@ public class SelectQueryRunnerTest QueryRunnerTestHelper.segmentId, -1, new ImmutableMap.Builder() - .put(EventHolder.timestampKey, new DateTime("2011-04-15T00:00:00.000Z")) + .put(EventHolder.timestampKey, DateTimes.of("2011-04-15T00:00:00.000Z")) .put("mar", "upfront") .put("qual", "premium0") .put("place", "preferred") @@ -349,7 +349,7 @@ public class SelectQueryRunnerTest QueryRunnerTestHelper.segmentId, -2, new ImmutableMap.Builder() - .put(EventHolder.timestampKey, new DateTime("2011-04-15T00:00:00.000Z")) + .put(EventHolder.timestampKey, DateTimes.of("2011-04-15T00:00:00.000Z")) .put("mar", "upfront") .put("qual", "mezzanine0") .put("place", "preferred") @@ -360,7 +360,7 @@ public class SelectQueryRunnerTest QueryRunnerTestHelper.segmentId, -3, new ImmutableMap.Builder() - .put(EventHolder.timestampKey, new DateTime("2011-04-15T00:00:00.000Z")) + .put(EventHolder.timestampKey, DateTimes.of("2011-04-15T00:00:00.000Z")) .put("mar", "total_market") .put("qual", "premium0") .put("place", "preferred") @@ -649,7 +649,7 @@ public class SelectQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new SelectResultValue( ImmutableMap.of(), Sets.newHashSet( @@ -737,7 +737,7 @@ public class SelectQueryRunnerTest List> expectedResultsAsc = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new SelectResultValue( ImmutableMap.of(QueryRunnerTestHelper.segmentId, 2), Sets.newHashSet("null_column", "floatIndex", "longTime"), @@ -747,7 +747,7 @@ public class SelectQueryRunnerTest QueryRunnerTestHelper.segmentId, 0, new ImmutableMap.Builder() - .put(EventHolder.timestampKey, new DateTime("2011-01-12T00:00:00.000Z")) + .put(EventHolder.timestampKey, DateTimes.of("2011-01-12T00:00:00.000Z")) .put("longTime", 1294790400000L) .put("floatIndex", 100.0f) .put(QueryRunnerTestHelper.indexMetric, 100.000000F) @@ -758,7 +758,7 @@ public class SelectQueryRunnerTest QueryRunnerTestHelper.segmentId, 1, new ImmutableMap.Builder() - .put(EventHolder.timestampKey, new DateTime("2011-01-12T00:00:00.000Z")) + .put(EventHolder.timestampKey, DateTimes.of("2011-01-12T00:00:00.000Z")) .put("longTime", 1294790400000L) .put("floatIndex", 100.0f) .put(QueryRunnerTestHelper.indexMetric, 100.000000F) @@ -769,7 +769,7 @@ public class SelectQueryRunnerTest QueryRunnerTestHelper.segmentId, 2, new ImmutableMap.Builder() - .put(EventHolder.timestampKey, new DateTime("2011-01-12T00:00:00.000Z")) + .put(EventHolder.timestampKey, DateTimes.of("2011-01-12T00:00:00.000Z")) .put("longTime", 1294790400000L) .put("floatIndex", 100.0f) .put(QueryRunnerTestHelper.indexMetric, 100.000000F) @@ -783,7 +783,7 @@ public class SelectQueryRunnerTest List> expectedResultsDsc = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + 
DateTimes.of("2011-01-12T00:00:00.000Z"), new SelectResultValue( ImmutableMap.of(QueryRunnerTestHelper.segmentId, -3), Sets.newHashSet("null_column", "floatIndex", "longTime"), @@ -793,7 +793,7 @@ public class SelectQueryRunnerTest QueryRunnerTestHelper.segmentId, -1, new ImmutableMap.Builder() - .put(EventHolder.timestampKey, new DateTime("2011-01-13T00:00:00.000Z")) + .put(EventHolder.timestampKey, DateTimes.of("2011-01-13T00:00:00.000Z")) .put("longTime", 1294876800000L) .put("floatIndex", 1564.6177f) .put(QueryRunnerTestHelper.indexMetric, 1564.6177f) @@ -804,7 +804,7 @@ public class SelectQueryRunnerTest QueryRunnerTestHelper.segmentId, -2, new ImmutableMap.Builder() - .put(EventHolder.timestampKey, new DateTime("2011-01-13T00:00:00.000Z")) + .put(EventHolder.timestampKey, DateTimes.of("2011-01-13T00:00:00.000Z")) .put("longTime", 1294876800000L) .put("floatIndex", 826.0602f) .put(QueryRunnerTestHelper.indexMetric, 826.0602f) @@ -815,7 +815,7 @@ public class SelectQueryRunnerTest QueryRunnerTestHelper.segmentId, -3, new ImmutableMap.Builder() - .put(EventHolder.timestampKey, new DateTime("2011-01-13T00:00:00.000Z")) + .put(EventHolder.timestampKey, DateTimes.of("2011-01-13T00:00:00.000Z")) .put("longTime", 1294876800000L) .put("floatIndex", 1689.0128f) .put(QueryRunnerTestHelper.indexMetric, 1689.0128f) @@ -855,7 +855,7 @@ public class SelectQueryRunnerTest List> expectedResultsAsc = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new SelectResultValue( ImmutableMap.of(QueryRunnerTestHelper.segmentId, 2), Sets.newHashSet("null_column", "floatIndex", "longTime"), @@ -865,7 +865,7 @@ public class SelectQueryRunnerTest QueryRunnerTestHelper.segmentId, 0, new ImmutableMap.Builder() - .put(EventHolder.timestampKey, new DateTime("2011-01-12T00:00:00.000Z")) + .put(EventHolder.timestampKey, DateTimes.of("2011-01-12T00:00:00.000Z")) .put("longTime", "super-1294790400000") .put("floatIndex", "super-100") .put(QueryRunnerTestHelper.indexMetric, 100.000000F) @@ -876,7 +876,7 @@ public class SelectQueryRunnerTest QueryRunnerTestHelper.segmentId, 1, new ImmutableMap.Builder() - .put(EventHolder.timestampKey, new DateTime("2011-01-12T00:00:00.000Z")) + .put(EventHolder.timestampKey, DateTimes.of("2011-01-12T00:00:00.000Z")) .put("longTime", "super-1294790400000") .put("floatIndex", "super-100") .put(QueryRunnerTestHelper.indexMetric, 100.000000F) @@ -887,7 +887,7 @@ public class SelectQueryRunnerTest QueryRunnerTestHelper.segmentId, 2, new ImmutableMap.Builder() - .put(EventHolder.timestampKey, new DateTime("2011-01-12T00:00:00.000Z")) + .put(EventHolder.timestampKey, DateTimes.of("2011-01-12T00:00:00.000Z")) .put("longTime", "super-1294790400000") .put("floatIndex", "super-100") .put(QueryRunnerTestHelper.indexMetric, 100.000000F) @@ -901,7 +901,7 @@ public class SelectQueryRunnerTest List> expectedResultsDsc = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new SelectResultValue( ImmutableMap.of(QueryRunnerTestHelper.segmentId, -3), Sets.newHashSet("null_column", "floatIndex", "longTime"), @@ -911,7 +911,7 @@ public class SelectQueryRunnerTest QueryRunnerTestHelper.segmentId, -1, new ImmutableMap.Builder() - .put(EventHolder.timestampKey, new DateTime("2011-01-13T00:00:00.000Z")) + .put(EventHolder.timestampKey, DateTimes.of("2011-01-13T00:00:00.000Z")) .put("longTime", "super-1294876800000") .put("floatIndex", "super-1564.617729") 
.put(QueryRunnerTestHelper.indexMetric, 1564.6177f) @@ -922,7 +922,7 @@ public class SelectQueryRunnerTest QueryRunnerTestHelper.segmentId, -2, new ImmutableMap.Builder() - .put(EventHolder.timestampKey, new DateTime("2011-01-13T00:00:00.000Z")) + .put(EventHolder.timestampKey, DateTimes.of("2011-01-13T00:00:00.000Z")) .put("longTime", "super-1294876800000") .put("floatIndex", "super-826.060182") .put(QueryRunnerTestHelper.indexMetric, 826.0602f) @@ -933,7 +933,7 @@ public class SelectQueryRunnerTest QueryRunnerTestHelper.segmentId, -3, new ImmutableMap.Builder() - .put(EventHolder.timestampKey, new DateTime("2011-01-13T00:00:00.000Z")) + .put(EventHolder.timestampKey, DateTimes.of("2011-01-13T00:00:00.000Z")) .put("longTime", "super-1294876800000") .put("floatIndex", "super-1689.012875") .put(QueryRunnerTestHelper.indexMetric, 1689.0128f) @@ -993,7 +993,7 @@ public class SelectQueryRunnerTest event.put( specs[0], specs.length == 1 || specs[1].equals("STRING") ? values[i] : - specs[1].equals("TIME") ? new DateTime(values[i]) : + specs[1].equals("TIME") ? DateTimes.of(values[i]) : specs[1].equals("FLOAT") ? Float.valueOf(values[i]) : specs[1].equals("DOUBLE") ? Double.valueOf(values[i]) : specs[1].equals("LONG") ? Long.valueOf(values[i]) : @@ -1042,7 +1042,7 @@ public class SelectQueryRunnerTest int lastOffset = holders.isEmpty() ? offset : holders.get(holders.size() - 1).getOffset(); expected.add( new Result( - new DateTime(group.get(0).get(EventHolder.timestampKey)), + new DateTime(group.get(0).get(EventHolder.timestampKey), ISOChronology.getInstanceUTC()), new SelectResultValue( ImmutableMap.of(QueryRunnerTestHelper.segmentId, lastOffset), Sets.newHashSet(dimensions), diff --git a/processing/src/test/java/io/druid/query/select/SelectQuerySpecTest.java b/processing/src/test/java/io/druid/query/select/SelectQuerySpecTest.java index 96bb12a3817..6f352392e8f 100644 --- a/processing/src/test/java/io/druid/query/select/SelectQuerySpecTest.java +++ b/processing/src/test/java/io/druid/query/select/SelectQuerySpecTest.java @@ -22,11 +22,11 @@ package io.druid.query.select; import com.fasterxml.jackson.databind.InjectableValues; import com.fasterxml.jackson.databind.ObjectMapper; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Intervals; import io.druid.query.QueryRunnerTestHelper; import io.druid.query.TableDataSource; import io.druid.query.dimension.DefaultDimensionSpec; import io.druid.query.spec.LegacySegmentSpec; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; @@ -77,7 +77,7 @@ public class SelectQuerySpecTest SelectQuery query = new SelectQuery( new TableDataSource(QueryRunnerTestHelper.dataSource), - new LegacySegmentSpec(new Interval("2011-01-12/2011-01-14")), + new LegacySegmentSpec(Intervals.of("2011-01-12/2011-01-14")), true, null, QueryRunnerTestHelper.allGran, @@ -126,7 +126,7 @@ public class SelectQuerySpecTest SelectQuery queryWithNull = new SelectQuery( new TableDataSource(QueryRunnerTestHelper.dataSource), - new LegacySegmentSpec(new Interval("2011-01-12/2011-01-14")), + new LegacySegmentSpec(Intervals.of("2011-01-12/2011-01-14")), true, null, QueryRunnerTestHelper.allGran, diff --git a/processing/src/test/java/io/druid/query/spec/QuerySegmentSpecTest.java b/processing/src/test/java/io/druid/query/spec/QuerySegmentSpecTest.java index 5c92b5e5275..fb5082c22a4 100644 --- a/processing/src/test/java/io/druid/query/spec/QuerySegmentSpecTest.java +++ b/processing/src/test/java/io/druid/query/spec/QuerySegmentSpecTest.java @@ 
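One spot in SelectQueryRunnerTest keeps the Joda constructor because the timestamp is read back out of an event map and may not be a plain ISO string; there the chronology is pinned explicitly instead. A small sketch of that pattern (the map value below is hypothetical):

    import org.joda.time.DateTime;
    import org.joda.time.chrono.ISOChronology;

    public class MixedTypeTimestamp
    {
      public static void main(String[] args)
      {
        // Joda's Object constructor accepts a DateTime, Long, Date or String; passing
        // the UTC ISO chronology keeps the result independent of the default zone.
        Object fromEventMap = "2011-01-12T00:00:00.000Z"; // hypothetical map value
        DateTime pinned = new DateTime(fromEventMap, ISOChronology.getInstanceUTC());
        System.out.println(pinned);
      }
    }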
-23,8 +23,8 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Intervals; import io.druid.query.SegmentDescriptor; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; @@ -44,7 +44,7 @@ public class QuerySegmentSpecTest ); Assert.assertTrue(spec instanceof LegacySegmentSpec); Assert.assertEquals( - ImmutableList.of(new Interval("2011-10-01/2011-10-10"), new Interval("2011-11-01/2011-11-10")), + ImmutableList.of(Intervals.of("2011-10-01/2011-10-10"), Intervals.of("2011-11-01/2011-11-10")), spec.getIntervals() ); } @@ -57,7 +57,7 @@ public class QuerySegmentSpecTest ); Assert.assertTrue(spec instanceof LegacySegmentSpec); Assert.assertEquals( - ImmutableList.of(new Interval("2011-09-01/2011-10-10"), new Interval("2011-11-01/2011-11-10")), + ImmutableList.of(Intervals.of("2011-09-01/2011-10-10"), Intervals.of("2011-11-01/2011-11-10")), spec.getIntervals() ); } @@ -71,7 +71,7 @@ public class QuerySegmentSpecTest ); Assert.assertTrue(spec instanceof MultipleIntervalSegmentSpec); Assert.assertEquals( - ImmutableList.of(new Interval("2011-08-01/2011-10-10"), new Interval("2011-11-01/2011-11-10")), + ImmutableList.of(Intervals.of("2011-08-01/2011-10-10"), Intervals.of("2011-11-01/2011-11-10")), spec.getIntervals() ); } @@ -106,14 +106,14 @@ public class QuerySegmentSpecTest ); Assert.assertTrue(spec instanceof MultipleSpecificSegmentSpec); Assert.assertEquals( - ImmutableList.of(new Interval("2011-07-01/2011-10-10"), new Interval("2011-11-01/2011-11-10")), + ImmutableList.of(Intervals.of("2011-07-01/2011-10-10"), Intervals.of("2011-11-01/2011-11-10")), spec.getIntervals() ); Assert.assertEquals( ImmutableList.of( - new SegmentDescriptor(new Interval("2011-07-01/2011-10-10"), "1", 0), - new SegmentDescriptor(new Interval("2011-07-01/2011-10-10"), "1", 1), - new SegmentDescriptor(new Interval("2011-11-01/2011-11-10"), "2", 10) + new SegmentDescriptor(Intervals.of("2011-07-01/2011-10-10"), "1", 0), + new SegmentDescriptor(Intervals.of("2011-07-01/2011-10-10"), "1", 1), + new SegmentDescriptor(Intervals.of("2011-11-01/2011-11-10"), "2", 10) ), ((MultipleSpecificSegmentSpec) spec).getDescriptors() ); diff --git a/processing/src/test/java/io/druid/query/spec/SpecificSegmentQueryRunnerTest.java b/processing/src/test/java/io/druid/query/spec/SpecificSegmentQueryRunnerTest.java index 76591d64405..f7f4dd0b12d 100644 --- a/processing/src/test/java/io/druid/query/spec/SpecificSegmentQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/spec/SpecificSegmentQueryRunnerTest.java @@ -25,6 +25,8 @@ import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.util.concurrent.MoreExecutors; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Accumulator; import io.druid.java.util.common.guava.Sequence; @@ -43,8 +45,6 @@ import io.druid.query.timeseries.TimeseriesQuery; import io.druid.query.timeseries.TimeseriesResultBuilder; import io.druid.query.timeseries.TimeseriesResultValue; import io.druid.segment.SegmentMissingException; -import org.joda.time.DateTime; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; @@ -60,7 +60,7 @@ public class 
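The interval literals in these spec tests move from new Interval("start/end") to Intervals.of("start/end") for the same reason: the Joda constructor parses both endpoints in the default zone. A sketch, assuming Intervals.of pins UTC as the rest of the patch implies:

    import io.druid.java.util.common.Intervals;

    import org.joda.time.Interval;

    public class IntervalExample
    {
      public static void main(String[] args)
      {
        // Forbidden now: new Interval("2011-10-01/2011-10-10"), whose endpoints take
        // the JVM default zone. The helper accepts the same ISO-8601 slash notation.
        Interval interval = Intervals.of("2011-10-01/2011-10-10");
        System.out.println(interval.getStart() + " / " + interval.getEnd());
      }
    }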
SpecificSegmentQueryRunnerTest { final ObjectMapper mapper = new DefaultObjectMapper(); SegmentDescriptor descriptor = new SegmentDescriptor( - new Interval("2012-01-01T00:00:00Z/P1D"), + Intervals.of("2012-01-01T00:00:00Z/P1D"), "version", 0 ); @@ -100,7 +100,7 @@ public class SpecificSegmentQueryRunnerTest TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() .dataSource("foo") .granularity(Granularities.ALL) - .intervals(ImmutableList.of(new Interval("2012-01-01T00:00:00Z/P1D"))) + .intervals(ImmutableList.of(Intervals.of("2012-01-01T00:00:00Z/P1D"))) .aggregators( ImmutableList.of( new CountAggregatorFactory("rows") @@ -135,13 +135,13 @@ public class SpecificSegmentQueryRunnerTest { final ObjectMapper mapper = new DefaultObjectMapper(); SegmentDescriptor descriptor = new SegmentDescriptor( - new Interval("2012-01-01T00:00:00Z/P1D"), + Intervals.of("2012-01-01T00:00:00Z/P1D"), "version", 0 ); TimeseriesResultBuilder builder = new TimeseriesResultBuilder( - new DateTime("2012-01-01T00:00:00Z") + DateTimes.of("2012-01-01T00:00:00Z") ); CountAggregator rows = new CountAggregator(); rows.aggregate(); @@ -177,7 +177,7 @@ public class SpecificSegmentQueryRunnerTest TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() .dataSource("foo") .granularity(Granularities.ALL) - .intervals(ImmutableList.of(new Interval("2012-01-01T00:00:00Z/P1D"))) + .intervals(ImmutableList.of(Intervals.of("2012-01-01T00:00:00Z/P1D"))) .aggregators( ImmutableList.of( new CountAggregatorFactory("rows") diff --git a/processing/src/test/java/io/druid/query/timeboundary/TimeBoundaryQueryQueryToolChestTest.java b/processing/src/test/java/io/druid/query/timeboundary/TimeBoundaryQueryQueryToolChestTest.java index 843083b8128..1742fa41107 100644 --- a/processing/src/test/java/io/druid/query/timeboundary/TimeBoundaryQueryQueryToolChestTest.java +++ b/processing/src/test/java/io/druid/query/timeboundary/TimeBoundaryQueryQueryToolChestTest.java @@ -23,13 +23,14 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.query.CacheStrategy; import io.druid.query.Druids; import io.druid.query.Result; import io.druid.query.TableDataSource; import io.druid.query.spec.MultipleIntervalSegmentSpec; import io.druid.timeline.LogicalSegment; -import org.joda.time.DateTime; import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; @@ -89,25 +90,25 @@ public class TimeBoundaryQueryQueryToolChestTest List segments = new TimeBoundaryQueryQueryToolChest().filterSegments( TIME_BOUNDARY_QUERY, Arrays.asList( - createLogicalSegment(new Interval("2013-01-01/P1D")), - createLogicalSegment(new Interval("2013-01-01T01/PT1H")), - createLogicalSegment(new Interval("2013-01-01T02/PT1H")), - createLogicalSegment(new Interval("2013-01-02/P1D")), - createLogicalSegment(new Interval("2013-01-03T01/PT1H")), - createLogicalSegment(new Interval("2013-01-03T02/PT1H")), - createLogicalSegment(new Interval("2013-01-03/P1D")) + createLogicalSegment(Intervals.of("2013-01-01/P1D")), + createLogicalSegment(Intervals.of("2013-01-01T01/PT1H")), + createLogicalSegment(Intervals.of("2013-01-01T02/PT1H")), + createLogicalSegment(Intervals.of("2013-01-02/P1D")), + createLogicalSegment(Intervals.of("2013-01-03T01/PT1H")), + createLogicalSegment(Intervals.of("2013-01-03T02/PT1H")), + 
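The same Intervals helper feeds SegmentDescriptor in the test above; a short usage sketch with the constructor arguments shown there:

    import io.druid.java.util.common.Intervals;
    import io.druid.query.SegmentDescriptor;

    public class DescriptorExample
    {
      public static void main(String[] args)
      {
        // The segment's interval is an ISO-8601 start/period string, parsed as UTC.
        SegmentDescriptor descriptor = new SegmentDescriptor(
            Intervals.of("2012-01-01T00:00:00Z/P1D"),
            "version",
            0
        );
        System.out.println(descriptor);
      }
    }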
createLogicalSegment(Intervals.of("2013-01-03/P1D")) ) ); Assert.assertEquals(6, segments.size()); List expected = Arrays.asList( - createLogicalSegment(new Interval("2013-01-01/P1D")), - createLogicalSegment(new Interval("2013-01-01T01/PT1H")), - createLogicalSegment(new Interval("2013-01-01T02/PT1H")), - createLogicalSegment(new Interval("2013-01-03T01/PT1H")), - createLogicalSegment(new Interval("2013-01-03T02/PT1H")), - createLogicalSegment(new Interval("2013-01-03/P1D")) + createLogicalSegment(Intervals.of("2013-01-01/P1D")), + createLogicalSegment(Intervals.of("2013-01-01T01/PT1H")), + createLogicalSegment(Intervals.of("2013-01-01T02/PT1H")), + createLogicalSegment(Intervals.of("2013-01-03T01/PT1H")), + createLogicalSegment(Intervals.of("2013-01-03T02/PT1H")), + createLogicalSegment(Intervals.of("2013-01-03/P1D")) ); for (int i = 0; i < segments.size(); i++) { @@ -121,22 +122,22 @@ public class TimeBoundaryQueryQueryToolChestTest List segments = new TimeBoundaryQueryQueryToolChest().filterSegments( MAXTIME_BOUNDARY_QUERY, Arrays.asList( - createLogicalSegment(new Interval("2013-01-01/P1D")), - createLogicalSegment(new Interval("2013-01-01T01/PT1H")), - createLogicalSegment(new Interval("2013-01-01T02/PT1H")), - createLogicalSegment(new Interval("2013-01-02/P1D")), - createLogicalSegment(new Interval("2013-01-03T01/PT1H")), - createLogicalSegment(new Interval("2013-01-03T02/PT1H")), - createLogicalSegment(new Interval("2013-01-03/P1D")) + createLogicalSegment(Intervals.of("2013-01-01/P1D")), + createLogicalSegment(Intervals.of("2013-01-01T01/PT1H")), + createLogicalSegment(Intervals.of("2013-01-01T02/PT1H")), + createLogicalSegment(Intervals.of("2013-01-02/P1D")), + createLogicalSegment(Intervals.of("2013-01-03T01/PT1H")), + createLogicalSegment(Intervals.of("2013-01-03T02/PT1H")), + createLogicalSegment(Intervals.of("2013-01-03/P1D")) ) ); Assert.assertEquals(3, segments.size()); List expected = Arrays.asList( - createLogicalSegment(new Interval("2013-01-03T01/PT1H")), - createLogicalSegment(new Interval("2013-01-03T02/PT1H")), - createLogicalSegment(new Interval("2013-01-03/P1D")) + createLogicalSegment(Intervals.of("2013-01-03T01/PT1H")), + createLogicalSegment(Intervals.of("2013-01-03T02/PT1H")), + createLogicalSegment(Intervals.of("2013-01-03/P1D")) ); for (int i = 0; i < segments.size(); i++) { @@ -150,22 +151,22 @@ public class TimeBoundaryQueryQueryToolChestTest List segments = new TimeBoundaryQueryQueryToolChest().filterSegments( MINTIME_BOUNDARY_QUERY, Arrays.asList( - createLogicalSegment(new Interval("2013-01-01/P1D")), - createLogicalSegment(new Interval("2013-01-01T01/PT1H")), - createLogicalSegment(new Interval("2013-01-01T02/PT1H")), - createLogicalSegment(new Interval("2013-01-02/P1D")), - createLogicalSegment(new Interval("2013-01-03T01/PT1H")), - createLogicalSegment(new Interval("2013-01-03T02/PT1H")), - createLogicalSegment(new Interval("2013-01-03/P1D")) + createLogicalSegment(Intervals.of("2013-01-01/P1D")), + createLogicalSegment(Intervals.of("2013-01-01T01/PT1H")), + createLogicalSegment(Intervals.of("2013-01-01T02/PT1H")), + createLogicalSegment(Intervals.of("2013-01-02/P1D")), + createLogicalSegment(Intervals.of("2013-01-03T01/PT1H")), + createLogicalSegment(Intervals.of("2013-01-03T02/PT1H")), + createLogicalSegment(Intervals.of("2013-01-03/P1D")) ) ); Assert.assertEquals(3, segments.size()); List expected = Arrays.asList( - createLogicalSegment(new Interval("2013-01-01/P1D")), - createLogicalSegment(new Interval("2013-01-01T01/PT1H")), - 
createLogicalSegment(new Interval("2013-01-01T02/PT1H")) + createLogicalSegment(Intervals.of("2013-01-01/P1D")), + createLogicalSegment(Intervals.of("2013-01-01T01/PT1H")), + createLogicalSegment(Intervals.of("2013-01-01T02/PT1H")) ); for (int i = 0; i < segments.size(); i++) { @@ -179,13 +180,13 @@ public class TimeBoundaryQueryQueryToolChestTest List segments = new TimeBoundaryQueryQueryToolChest().filterSegments( FILTERED_BOUNDARY_QUERY, Arrays.asList( - createLogicalSegment(new Interval("2013-01-01/P1D")), - createLogicalSegment(new Interval("2013-01-01T01/PT1H")), - createLogicalSegment(new Interval("2013-01-01T02/PT1H")), - createLogicalSegment(new Interval("2013-01-02/P1D")), - createLogicalSegment(new Interval("2013-01-03T01/PT1H")), - createLogicalSegment(new Interval("2013-01-03T02/PT1H")), - createLogicalSegment(new Interval("2013-01-03/P1D")) + createLogicalSegment(Intervals.of("2013-01-01/P1D")), + createLogicalSegment(Intervals.of("2013-01-01T01/PT1H")), + createLogicalSegment(Intervals.of("2013-01-01T02/PT1H")), + createLogicalSegment(Intervals.of("2013-01-02/P1D")), + createLogicalSegment(Intervals.of("2013-01-03T01/PT1H")), + createLogicalSegment(Intervals.of("2013-01-03T02/PT1H")), + createLogicalSegment(Intervals.of("2013-01-03/P1D")) ) ); @@ -198,13 +199,7 @@ public class TimeBoundaryQueryQueryToolChestTest new TimeBoundaryQueryQueryToolChest().getCacheStrategy( new TimeBoundaryQuery( new TableDataSource("dummy"), - new MultipleIntervalSegmentSpec( - ImmutableList.of( - new Interval( - "2015-01-01/2015-01-02" - ) - ) - ), + new MultipleIntervalSegmentSpec(ImmutableList.of(Intervals.of("2015-01-01/2015-01-02"))), null, null, null @@ -212,10 +207,10 @@ public class TimeBoundaryQueryQueryToolChestTest ); final Result result = new Result<>( - new DateTime(123L), new TimeBoundaryResultValue( + DateTimes.utc(123L), new TimeBoundaryResultValue( ImmutableMap.of( - TimeBoundaryQuery.MIN_TIME, new DateTime(0L).toString(), - TimeBoundaryQuery.MAX_TIME, new DateTime("2015-01-01").toString() + TimeBoundaryQuery.MIN_TIME, DateTimes.EPOCH.toString(), + TimeBoundaryQuery.MAX_TIME, DateTimes.of("2015-01-01").toString() ) ) ); diff --git a/processing/src/test/java/io/druid/query/timeboundary/TimeBoundaryQueryRunnerTest.java b/processing/src/test/java/io/druid/query/timeboundary/TimeBoundaryQueryRunnerTest.java index 7e7273ab375..245ae21007f 100644 --- a/processing/src/test/java/io/druid/query/timeboundary/TimeBoundaryQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/timeboundary/TimeBoundaryQueryRunnerTest.java @@ -24,6 +24,8 @@ import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.collect.MapMaker; import com.google.common.io.CharSource; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Sequences; import io.druid.query.Druids; @@ -115,7 +117,7 @@ public class TimeBoundaryQueryRunnerTest private static IncrementalIndex newIndex(String minTimeStamp, int maxRowCount) { final IncrementalIndexSchema schema = new IncrementalIndexSchema.Builder() - .withMinTimestamp(new DateTime(minTimeStamp).getMillis()) + .withMinTimestamp(DateTimes.of(minTimeStamp).getMillis()) .withQueryGranularity(Granularities.HOUR) .withMetrics(TestIndex.METRIC_AGGS) .build(); @@ -157,7 +159,7 @@ public class TimeBoundaryQueryRunnerTest timeline.add(index1.getInterval(), "v1", new 
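The cache-strategy test above also stops calling new DateTime(long): that constructor attaches the default zone to the chronology, which shows up in toString() and therefore in string-serialized expectations. The replacements shown are DateTimes.utc(millis) and the DateTimes.EPOCH shorthand; a minimal sketch:

    import io.druid.java.util.common.DateTimes;

    import org.joda.time.DateTime;

    public class MillisToUtc
    {
      public static void main(String[] args)
      {
        DateTime t = DateTimes.utc(123L);    // fixed instant, UTC chronology
        DateTime epoch = DateTimes.EPOCH;    // shorthand used for millis == 0
        System.out.println(t);               // e.g. 1970-01-01T00:00:00.123Z
        System.out.println(epoch);           // e.g. 1970-01-01T00:00:00.000Z
      }
    }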
SingleElementPartitionChunk(segment1)); segmentIdentifiers = Lists.newArrayList(); - for (TimelineObjectHolder holder : timeline.lookup(new Interval("2011-01-12/2011-01-17"))) { + for (TimelineObjectHolder holder : timeline.lookup(Intervals.of("2011-01-12/2011-01-17"))) { segmentIdentifiers.add(makeIdentifier(holder.getInterval(), holder.getVersion())); } @@ -186,8 +188,8 @@ public class TimeBoundaryQueryRunnerTest DateTime minTime = val.getMinTime(); DateTime maxTime = val.getMaxTime(); - Assert.assertEquals(new DateTime("2011-01-13T00:00:00.000Z"), minTime); - Assert.assertEquals(new DateTime("2011-01-16T00:00:00.000Z"), maxTime); + Assert.assertEquals(DateTimes.of("2011-01-13T00:00:00.000Z"), minTime); + Assert.assertEquals(DateTimes.of("2011-01-16T00:00:00.000Z"), maxTime); } @Test @@ -226,8 +228,8 @@ public class TimeBoundaryQueryRunnerTest DateTime minTime = val.getMinTime(); DateTime maxTime = val.getMaxTime(); - Assert.assertEquals(new DateTime("2011-01-12T00:00:00.000Z"), minTime); - Assert.assertEquals(new DateTime("2011-04-15T00:00:00.000Z"), maxTime); + Assert.assertEquals(DateTimes.of("2011-01-12T00:00:00.000Z"), minTime); + Assert.assertEquals(DateTimes.of("2011-04-15T00:00:00.000Z"), maxTime); } @Test @@ -249,7 +251,7 @@ public class TimeBoundaryQueryRunnerTest DateTime maxTime = val.getMaxTime(); Assert.assertNull(minTime); - Assert.assertEquals(new DateTime("2011-04-15T00:00:00.000Z"), maxTime); + Assert.assertEquals(DateTimes.of("2011-04-15T00:00:00.000Z"), maxTime); } @Test @@ -270,7 +272,7 @@ public class TimeBoundaryQueryRunnerTest DateTime minTime = val.getMinTime(); DateTime maxTime = val.getMaxTime(); - Assert.assertEquals(new DateTime("2011-01-12T00:00:00.000Z"), minTime); + Assert.assertEquals(DateTimes.of("2011-01-12T00:00:00.000Z"), minTime); Assert.assertNull(maxTime); } @@ -279,7 +281,7 @@ public class TimeBoundaryQueryRunnerTest { List> results = Arrays.asList( new Result<>( - new DateTime(), + DateTimes.nowUtc(), new TimeBoundaryResultValue( ImmutableMap.of( "maxTime", "2012-01-01", @@ -288,7 +290,7 @@ public class TimeBoundaryQueryRunnerTest ) ), new Result<>( - new DateTime(), + DateTimes.nowUtc(), new TimeBoundaryResultValue( ImmutableMap.of( "maxTime", "2012-02-01", @@ -301,7 +303,7 @@ public class TimeBoundaryQueryRunnerTest TimeBoundaryQuery query = new TimeBoundaryQuery(new TableDataSource("test"), null, null, null, null); Iterable> actual = query.mergeResults(results); - Assert.assertTrue(actual.iterator().next().getValue().getMaxTime().equals(new DateTime("2012-02-01"))); + Assert.assertTrue(actual.iterator().next().getValue().getMaxTime().equals(DateTimes.of("2012-02-01"))); } @Test diff --git a/processing/src/test/java/io/druid/query/timeseries/TimeSeriesUnionQueryRunnerTest.java b/processing/src/test/java/io/druid/query/timeseries/TimeSeriesUnionQueryRunnerTest.java index 80464169b61..0fbdb3d039b 100644 --- a/processing/src/test/java/io/druid/query/timeseries/TimeSeriesUnionQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/timeseries/TimeSeriesUnionQueryRunnerTest.java @@ -22,6 +22,7 @@ package io.druid.query.timeseries; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.google.common.collect.Maps; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; import io.druid.query.Druids; @@ -36,7 +37,6 @@ import io.druid.query.UnionQueryRunner; import io.druid.query.aggregation.AggregatorFactory; 
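Where the tests above previously stamped results with new DateTime() ("now" in the default zone), the patch switches to DateTimes.nowUtc(): the same instant with a UTC chronology, so merged results and printed expectations compare cleanly across machines. Sketch:

    import io.druid.java.util.common.DateTimes;

    import org.joda.time.DateTime;

    public class CurrentTime
    {
      public static void main(String[] args)
      {
        DateTime currTime = DateTimes.nowUtc(); // replaces the forbidden new DateTime()
        System.out.println(currTime);
      }
    }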
import io.druid.query.aggregation.LongSumAggregatorFactory; import io.druid.segment.TestHelper; -import org.joda.time.DateTime; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; @@ -108,13 +108,13 @@ public class TimeSeriesUnionQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01"), + DateTimes.of("2011-04-01"), new TimeseriesResultValue( ImmutableMap.of("rows", 52L, "idx", 26476L, "uniques", QueryRunnerTestHelper.UNIQUES_9) ) ), new Result<>( - new DateTime("2011-04-02"), + DateTimes.of("2011-04-02"), new TimeseriesResultValue( ImmutableMap.of("rows", 52L, "idx", 23308L, "uniques", QueryRunnerTestHelper.UNIQUES_9) ) @@ -157,25 +157,25 @@ public class TimeSeriesUnionQueryRunnerTest QueryToolChest toolChest = new TimeseriesQueryQueryToolChest(QueryRunnerTestHelper.NoopIntervalChunkingQueryRunnerDecorator()); final List> ds1 = Lists.newArrayList( new Result<>( - new DateTime("2011-04-02"), + DateTimes.of("2011-04-02"), new TimeseriesResultValue(ImmutableMap.of("rows", 1L, "idx", 2L)) ), new Result<>( - new DateTime("2011-04-03"), + DateTimes.of("2011-04-03"), new TimeseriesResultValue(ImmutableMap.of("rows", 3L, "idx", 4L)) ) ); final List> ds2 = Lists.newArrayList( new Result<>( - new DateTime("2011-04-01"), + DateTimes.of("2011-04-01"), new TimeseriesResultValue(ImmutableMap.of("rows", 5L, "idx", 6L)) ), new Result<>( - new DateTime("2011-04-02"), + DateTimes.of("2011-04-02"), new TimeseriesResultValue(ImmutableMap.of("rows", 7L, "idx", 8L)) ), new Result<>( - new DateTime("2011-04-04"), + DateTimes.of("2011-04-04"), new TimeseriesResultValue(ImmutableMap.of("rows", 9L, "idx", 10L)) ) ); @@ -202,25 +202,25 @@ public class TimeSeriesUnionQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01"), + DateTimes.of("2011-04-01"), new TimeseriesResultValue( ImmutableMap.of("rows", 5L, "idx", 6L) ) ), new Result<>( - new DateTime("2011-04-02"), + DateTimes.of("2011-04-02"), new TimeseriesResultValue( ImmutableMap.of("rows", 8L, "idx", 10L) ) ), new Result<>( - new DateTime("2011-04-03"), + DateTimes.of("2011-04-03"), new TimeseriesResultValue( ImmutableMap.of("rows", 3L, "idx", 4L) ) ), new Result<>( - new DateTime("2011-04-04"), + DateTimes.of("2011-04-04"), new TimeseriesResultValue( ImmutableMap.of("rows", 9L, "idx", 10L) ) diff --git a/processing/src/test/java/io/druid/query/timeseries/TimeseriesBinaryFnTest.java b/processing/src/test/java/io/druid/query/timeseries/TimeseriesBinaryFnTest.java index 7d70579b79d..0a4e5ae826a 100644 --- a/processing/src/test/java/io/druid/query/timeseries/TimeseriesBinaryFnTest.java +++ b/processing/src/test/java/io/druid/query/timeseries/TimeseriesBinaryFnTest.java @@ -20,6 +20,7 @@ package io.druid.query.timeseries; import com.google.common.collect.ImmutableMap; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.granularity.Granularities; import io.druid.query.Result; import io.druid.query.aggregation.AggregatorFactory; @@ -42,7 +43,7 @@ public class TimeseriesBinaryFnTest rowsCount, indexLongSum ); - final DateTime currTime = new DateTime(); + final DateTime currTime = DateTimes.nowUtc(); @Test public void testMerge() diff --git a/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryQueryToolChestTest.java b/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryQueryToolChestTest.java index 38c315433e4..b23cb0fe28f 100644 --- 
a/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryQueryToolChestTest.java +++ b/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryQueryToolChestTest.java @@ -22,6 +22,8 @@ package io.druid.query.timeseries; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.granularity.Granularities; import io.druid.query.CacheStrategy; import io.druid.query.Druids; @@ -33,8 +35,6 @@ import io.druid.query.aggregation.LongSumAggregatorFactory; import io.druid.query.spec.MultipleIntervalSegmentSpec; import io.druid.segment.TestHelper; import io.druid.segment.VirtualColumns; -import org.joda.time.DateTime; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; @@ -68,13 +68,7 @@ public class TimeseriesQueryQueryToolChestTest TOOL_CHEST.getCacheStrategy( new TimeseriesQuery( new TableDataSource("dummy"), - new MultipleIntervalSegmentSpec( - ImmutableList.of( - new Interval( - "2015-01-01/2015-01-02" - ) - ) - ), + new MultipleIntervalSegmentSpec(ImmutableList.of(Intervals.of("2015-01-01/2015-01-02"))), descending, VirtualColumns.EMPTY, null, @@ -90,7 +84,7 @@ public class TimeseriesQueryQueryToolChestTest final Result result = new Result<>( // test timestamps that result in integer size millis - new DateTime(123L), + DateTimes.utc(123L), new TimeseriesResultValue( ImmutableMap.of("metric1", 2, "metric0", 3) ) diff --git a/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerBonusTest.java b/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerBonusTest.java index 526751ba929..8b218d29550 100644 --- a/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerBonusTest.java +++ b/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerBonusTest.java @@ -23,6 +23,8 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import io.druid.data.input.MapBasedInputRow; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Sequences; import io.druid.query.Druids; @@ -39,8 +41,6 @@ import io.druid.segment.IncrementalIndexSegment; import io.druid.segment.Segment; import io.druid.segment.incremental.IncrementalIndex; import io.druid.segment.incremental.IncrementalIndexSchema; -import org.joda.time.DateTime; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; @@ -73,7 +73,7 @@ public class TimeseriesQueryRunnerBonusTest final IncrementalIndex oneRowIndex = new IncrementalIndex.Builder() .setIndexSchema( new IncrementalIndexSchema.Builder() - .withMinTimestamp(new DateTime("2012-01-01T00:00:00Z").getMillis()) + .withMinTimestamp(DateTimes.of("2012-01-01T00:00:00Z").getMillis()) .build() ) .setMaxRowCount(1000) @@ -83,7 +83,7 @@ public class TimeseriesQueryRunnerBonusTest oneRowIndex.add( new MapBasedInputRow( - new DateTime("2012-01-01T00:00:00Z").getMillis(), + DateTimes.of("2012-01-01T00:00:00Z").getMillis(), ImmutableList.of("dim1"), ImmutableMap.of("dim1", "x") ) @@ -93,12 +93,12 @@ public class TimeseriesQueryRunnerBonusTest Assert.assertEquals("index size", 1, 
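The bonus test rows above pin their event time by converting a UTC literal to millis before handing it to MapBasedInputRow. The literal carries an explicit Z, so the instant itself was never ambiguous; the helper simply avoids the forbidden constructor and keeps the chronology UTC. A sketch using the same constructor arguments as the test:

    import com.google.common.collect.ImmutableList;
    import com.google.common.collect.ImmutableMap;

    import io.druid.data.input.MapBasedInputRow;
    import io.druid.java.util.common.DateTimes;

    public class FixedTimestampRow
    {
      public static void main(String[] args)
      {
        long timestamp = DateTimes.of("2012-01-01T00:00:00Z").getMillis();
        MapBasedInputRow row = new MapBasedInputRow(
            timestamp,
            ImmutableList.of("dim1"),
            ImmutableMap.<String, Object>of("dim1", "x")
        );
        System.out.println(row);
      }
    }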
oneRowIndex.size()); Assert.assertEquals("result size", 1, results.size()); - Assert.assertEquals("result timestamp", new DateTime("2012-01-01T00:00:00Z"), results.get(0).getTimestamp()); + Assert.assertEquals("result timestamp", DateTimes.of("2012-01-01T00:00:00Z"), results.get(0).getTimestamp()); Assert.assertEquals("result count metric", 1, (long) results.get(0).getValue().getLongMetric("rows")); oneRowIndex.add( new MapBasedInputRow( - new DateTime("2012-01-01T00:00:00Z").getMillis(), + DateTimes.of("2012-01-01T00:00:00Z").getMillis(), ImmutableList.of("dim1"), ImmutableMap.of("dim1", "y") ) @@ -108,7 +108,7 @@ public class TimeseriesQueryRunnerBonusTest Assert.assertEquals("index size", 2, oneRowIndex.size()); Assert.assertEquals("result size", 1, results.size()); - Assert.assertEquals("result timestamp", new DateTime("2012-01-01T00:00:00Z"), results.get(0).getTimestamp()); + Assert.assertEquals("result timestamp", DateTimes.of("2012-01-01T00:00:00Z"), results.get(0).getTimestamp()); Assert.assertEquals("result count metric", 2, (long) results.get(0).getValue().getLongMetric("rows")); } @@ -129,7 +129,7 @@ public class TimeseriesQueryRunnerBonusTest TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() .dataSource("xxx") .granularity(Granularities.ALL) - .intervals(ImmutableList.of(new Interval("2012-01-01T00:00:00Z/P1D"))) + .intervals(ImmutableList.of(Intervals.of("2012-01-01T00:00:00Z/P1D"))) .aggregators( ImmutableList.of( new CountAggregatorFactory("rows") diff --git a/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerTest.java b/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerTest.java index 2c98d74facf..8d7b57056fb 100644 --- a/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerTest.java @@ -23,8 +23,10 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; -import io.druid.java.util.common.StringUtils; import com.google.common.primitives.Doubles; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.granularity.Granularity; import io.druid.java.util.common.granularity.PeriodGranularity; @@ -146,7 +148,7 @@ public class TimeseriesQueryRunnerTest List> expectedResults = ImmutableList.of( new Result<>( - new DateTime("2020-04-02"), + DateTimes.of("2020-04-02"), new TimeseriesResultValue( ImmutableMap.of( "rows", 0L, @@ -289,8 +291,8 @@ public class TimeseriesQueryRunnerTest .descending(descending) .build(); - DateTime expectedEarliest = new DateTime("2011-01-12"); - DateTime expectedLast = new DateTime("2011-04-15"); + DateTime expectedEarliest = DateTimes.of("2011-01-12"); + DateTime expectedLast = DateTimes.of("2011-04-15"); Iterable> results = Sequences.toList( runner.run(QueryPlus.wrap(query), CONTEXT), @@ -392,13 +394,13 @@ public class TimeseriesQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01"), + DateTimes.of("2011-04-01"), new TimeseriesResultValue( ImmutableMap.of("rows", 13L, "idx", 6619L, "uniques", QueryRunnerTestHelper.UNIQUES_9) ) ), new Result<>( - new DateTime("2011-04-02"), + DateTimes.of("2011-04-02"), new TimeseriesResultValue( ImmutableMap.of("rows", 13L, 
"idx", 5827L, "uniques", QueryRunnerTestHelper.UNIQUES_9) ) @@ -440,13 +442,13 @@ public class TimeseriesQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01"), + DateTimes.of("2011-04-01"), new TimeseriesResultValue( ImmutableMap.of("rows", 13L, "idx", 6619L, "uniques", QueryRunnerTestHelper.UNIQUES_9) ) ), new Result<>( - new DateTime("2011-04-02"), + DateTimes.of("2011-04-02"), new TimeseriesResultValue( ImmutableMap.of("rows", 13L, "idx", 5827L, "uniques", QueryRunnerTestHelper.UNIQUES_9) ) @@ -517,9 +519,7 @@ public class TimeseriesQueryRunnerTest .granularity(new PeriodGranularity(new Period("P1M"), null, null)) .intervals( Collections.singletonList( - new Interval( - "2011-04-02T00:00:00.000Z/2011-04-03T00:00:00.000Z" - ) + Intervals.of("2011-04-02T00:00:00.000Z/2011-04-03T00:00:00.000Z") ) ) .aggregators( @@ -537,7 +537,7 @@ public class TimeseriesQueryRunnerTest List> expectedResults1 = Collections.singletonList( new Result<>( - new DateTime("2011-04-01"), + DateTimes.of("2011-04-01"), new TimeseriesResultValue( ImmutableMap.of("rows", 13L, "idx", 5827L, "uniques", QueryRunnerTestHelper.UNIQUES_9) ) @@ -555,9 +555,7 @@ public class TimeseriesQueryRunnerTest .granularity("DAY") .intervals( Collections.singletonList( - new Interval( - "2011-04-02T00:00:00.000Z/2011-04-03T00:00:00.000Z" - ) + Intervals.of("2011-04-02T00:00:00.000Z/2011-04-03T00:00:00.000Z") ) ) .aggregators( @@ -574,7 +572,7 @@ public class TimeseriesQueryRunnerTest List> expectedResults2 = Collections.singletonList( new Result<>( - new DateTime("2011-04-02"), + DateTimes.of("2011-04-02"), new TimeseriesResultValue( ImmutableMap.of("rows", 13L, "idx", 5827L, "uniques", QueryRunnerTestHelper.UNIQUES_9) ) @@ -603,9 +601,7 @@ public class TimeseriesQueryRunnerTest ) .intervals( Collections.singletonList( - new Interval( - "2011-01-12T00:00:00.000-08:00/2011-01-20T00:00:00.000-08:00" - ) + Intervals.of("2011-01-12T00:00:00.000-08:00/2011-01-20T00:00:00.000-08:00") ) ) .aggregators( @@ -651,9 +647,7 @@ public class TimeseriesQueryRunnerTest .granularity(Granularities.HOUR) .intervals( Collections.singletonList( - new Interval( - "2011-04-14T00:00:00.000Z/2011-05-01T00:00:00.000Z" - ) + Intervals.of("2011-04-14T00:00:00.000Z/2011-05-01T00:00:00.000Z") ) ) .aggregators( @@ -669,7 +663,9 @@ public class TimeseriesQueryRunnerTest .build(); List> lotsOfZeroes = Lists.newArrayList(); - final Iterable iterable = Granularities.HOUR.getIterable(new Interval(new DateTime("2011-04-14T01").getMillis(), new DateTime("2011-04-15").getMillis())); + final Iterable iterable = Granularities.HOUR.getIterable( + new Interval(DateTimes.of("2011-04-14T01"), DateTimes.of("2011-04-15")) + ); for (Interval interval : iterable) { lotsOfZeroes.add( new Result<>( @@ -685,7 +681,7 @@ public class TimeseriesQueryRunnerTest Iterables.concat( Collections.singletonList( new Result<>( - new DateTime("2011-04-14T00"), + DateTimes.of("2011-04-14T00"), new TimeseriesResultValue( ImmutableMap.of("rows", 13L, "idx", 4907L) ) @@ -694,7 +690,7 @@ public class TimeseriesQueryRunnerTest lotsOfZeroes, Collections.singletonList( new Result<>( - new DateTime("2011-04-15T00"), + DateTimes.of("2011-04-15T00"), new TimeseriesResultValue( ImmutableMap.of("rows", 13L, "idx", 4717L) ) @@ -719,17 +715,11 @@ public class TimeseriesQueryRunnerTest .granularity( new PeriodGranularity( new Period("PT1H"), - new DateTime(60000), + DateTimes.utc(60000), DateTimeZone.UTC ) ) - .intervals( - Collections.singletonList( - new Interval( - 
"2011-04-15T00:00:00.000Z/2012" - ) - ) - ) + .intervals(Collections.singletonList(Intervals.of("2011-04-15T00:00:00.000Z/2012"))) .aggregators( Arrays.asList( QueryRunnerTestHelper.rowsCount, @@ -744,7 +734,7 @@ public class TimeseriesQueryRunnerTest List> expectedResults1 = Collections.singletonList( new Result<>( - new DateTime("2011-04-14T23:01Z"), + DateTimes.of("2011-04-14T23:01Z"), new TimeseriesResultValue( ImmutableMap.of("rows", 13L, "idx", 4717L) ) @@ -767,9 +757,7 @@ public class TimeseriesQueryRunnerTest .granularity(new PeriodGranularity(new Period("P1M"), null, null)) .intervals( Collections.singletonList( - new Interval( - "2011-04-02T00:00:00.000Z/2011-04-03T00:00:00.000Z" - ) + Intervals.of("2011-04-02T00:00:00.000Z/2011-04-03T00:00:00.000Z") ) ) .aggregators( @@ -787,7 +775,7 @@ public class TimeseriesQueryRunnerTest List> expectedResults1 = Collections.singletonList( new Result<>( - new DateTime("2011-04-01"), + DateTimes.of("2011-04-01"), new TimeseriesResultValue( ImmutableMap.of("rows", 13L, "idx", 5827L, "uniques", QueryRunnerTestHelper.UNIQUES_9) ) @@ -805,9 +793,7 @@ public class TimeseriesQueryRunnerTest .granularity("DAY") .intervals( Collections.singletonList( - new Interval( - "2011-04-02T00:00:00.000Z/2011-04-03T00:00:00.000Z" - ) + Intervals.of("2011-04-02T00:00:00.000Z/2011-04-03T00:00:00.000Z") ) ) .aggregators( @@ -824,7 +810,7 @@ public class TimeseriesQueryRunnerTest List> expectedResults2 = Collections.singletonList( new Result<>( - new DateTime("2011-04-02"), + DateTimes.of("2011-04-02"), new TimeseriesResultValue( ImmutableMap.of("rows", 13L, "idx", 5827L, "uniques", QueryRunnerTestHelper.UNIQUES_9) ) @@ -846,11 +832,7 @@ public class TimeseriesQueryRunnerTest .granularity(QueryRunnerTestHelper.dayGran) .intervals( new MultipleIntervalSegmentSpec( - Collections.singletonList( - new Interval( - "2015-01-01/2015-01-10" - ) - ) + Collections.singletonList(Intervals.of("2015-01-01/2015-01-10")) ) ) .aggregators( @@ -896,7 +878,7 @@ public class TimeseriesQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01"), + DateTimes.of("2011-04-01"), new TimeseriesResultValue( ImmutableMap.of( "rows", 13L, @@ -907,7 +889,7 @@ public class TimeseriesQueryRunnerTest ) ), new Result<>( - new DateTime("2011-04-02"), + DateTimes.of("2011-04-02"), new TimeseriesResultValue( ImmutableMap.of( "rows", 13L, @@ -947,7 +929,7 @@ public class TimeseriesQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01"), + DateTimes.of("2011-04-01"), new TimeseriesResultValue( ImmutableMap.of( "rows", 11L, @@ -958,7 +940,7 @@ public class TimeseriesQueryRunnerTest ) ), new Result<>( - new DateTime("2011-04-02"), + DateTimes.of("2011-04-02"), new TimeseriesResultValue( ImmutableMap.of( "rows", 11L, @@ -999,7 +981,7 @@ public class TimeseriesQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01"), + DateTimes.of("2011-04-01"), new TimeseriesResultValue( ImmutableMap.of( "rows", 9L, @@ -1010,7 +992,7 @@ public class TimeseriesQueryRunnerTest ) ), new Result<>( - new DateTime("2011-04-02"), + DateTimes.of("2011-04-02"), new TimeseriesResultValue( ImmutableMap.of( "rows", 9L, @@ -1051,7 +1033,7 @@ public class TimeseriesQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01"), + DateTimes.of("2011-04-01"), new TimeseriesResultValue( ImmutableMap.of( "rows", 2L, @@ -1062,7 +1044,7 @@ public class TimeseriesQueryRunnerTest ) ), new 
Result<>( - new DateTime("2011-04-02"), + DateTimes.of("2011-04-02"), new TimeseriesResultValue( ImmutableMap.of( "rows", 2L, @@ -1103,7 +1085,7 @@ public class TimeseriesQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01"), + DateTimes.of("2011-04-01"), new TimeseriesResultValue( ImmutableMap.of( "rows", 2L, @@ -1114,7 +1096,7 @@ public class TimeseriesQueryRunnerTest ) ), new Result<>( - new DateTime("2011-04-02"), + DateTimes.of("2011-04-02"), new TimeseriesResultValue( ImmutableMap.of( "rows", 2L, @@ -1163,7 +1145,7 @@ public class TimeseriesQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01"), + DateTimes.of("2011-04-01"), new TimeseriesResultValue( ImmutableMap.of( "rows", 2L, @@ -1174,7 +1156,7 @@ public class TimeseriesQueryRunnerTest ) ), new Result<>( - new DateTime("2011-04-02"), + DateTimes.of("2011-04-02"), new TimeseriesResultValue( ImmutableMap.of( "rows", 2L, @@ -1223,7 +1205,7 @@ public class TimeseriesQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01"), + DateTimes.of("2011-04-01"), new TimeseriesResultValue( ImmutableMap.of( "rows", 1L, @@ -1234,7 +1216,7 @@ public class TimeseriesQueryRunnerTest ) ), new Result<>( - new DateTime("2011-04-02"), + DateTimes.of("2011-04-02"), new TimeseriesResultValue( ImmutableMap.of( "rows", 1L, @@ -1283,7 +1265,7 @@ public class TimeseriesQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01"), + DateTimes.of("2011-04-01"), new TimeseriesResultValue( ImmutableMap.of( "rows", 1L, @@ -1294,7 +1276,7 @@ public class TimeseriesQueryRunnerTest ) ), new Result<>( - new DateTime("2011-04-02"), + DateTimes.of("2011-04-02"), new TimeseriesResultValue( ImmutableMap.of( "rows", 1L, @@ -1341,7 +1323,7 @@ public class TimeseriesQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01"), + DateTimes.of("2011-04-01"), new TimeseriesResultValue( ImmutableMap.of( "rows", 13L, @@ -1352,7 +1334,7 @@ public class TimeseriesQueryRunnerTest ) ), new Result<>( - new DateTime("2011-04-02"), + DateTimes.of("2011-04-02"), new TimeseriesResultValue( ImmutableMap.of( "rows", 13L, @@ -1405,7 +1387,7 @@ public class TimeseriesQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01"), + DateTimes.of("2011-04-01"), new TimeseriesResultValue( ImmutableMap.of( "rows", 13L, @@ -1416,7 +1398,7 @@ public class TimeseriesQueryRunnerTest ) ), new Result<>( - new DateTime("2011-04-02"), + DateTimes.of("2011-04-02"), new TimeseriesResultValue( ImmutableMap.of( "rows", 13L, @@ -1469,7 +1451,7 @@ public class TimeseriesQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01"), + DateTimes.of("2011-04-01"), new TimeseriesResultValue( ImmutableMap.of( "rows", 2L, @@ -1480,7 +1462,7 @@ public class TimeseriesQueryRunnerTest ) ), new Result<>( - new DateTime("2011-04-02"), + DateTimes.of("2011-04-02"), new TimeseriesResultValue( ImmutableMap.of( "rows", 2L, @@ -1515,7 +1497,7 @@ public class TimeseriesQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01"), + DateTimes.of("2011-04-01"), new TimeseriesResultValue( ImmutableMap.of( "rows", 0L, @@ -1526,7 +1508,7 @@ public class TimeseriesQueryRunnerTest ) ), new Result<>( - new DateTime("2011-04-02"), + DateTimes.of("2011-04-02"), new TimeseriesResultValue( ImmutableMap.of( "rows", 0L, @@ -1585,7 
+1567,7 @@ public class TimeseriesQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01"), + DateTimes.of("2011-04-01"), new TimeseriesResultValue( ImmutableMap.of( "rows", 13L, @@ -1596,7 +1578,7 @@ public class TimeseriesQueryRunnerTest ) ), new Result<>( - new DateTime("2011-04-02"), + DateTimes.of("2011-04-02"), new TimeseriesResultValue( ImmutableMap.of( "rows", 13L, @@ -1631,7 +1613,7 @@ public class TimeseriesQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01"), + DateTimes.of("2011-04-01"), new TimeseriesResultValue( ImmutableMap.of( "rows", 13L, @@ -1642,7 +1624,7 @@ public class TimeseriesQueryRunnerTest ) ), new Result<>( - new DateTime("2011-04-02"), + DateTimes.of("2011-04-02"), new TimeseriesResultValue( ImmutableMap.of( "rows", 13L, @@ -1677,7 +1659,7 @@ public class TimeseriesQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01"), + DateTimes.of("2011-04-01"), new TimeseriesResultValue( ImmutableMap.of( "rows", 0L, @@ -1688,7 +1670,7 @@ public class TimeseriesQueryRunnerTest ) ), new Result<>( - new DateTime("2011-04-02"), + DateTimes.of("2011-04-02"), new TimeseriesResultValue( ImmutableMap.of( "rows", 0L, @@ -1737,7 +1719,7 @@ public class TimeseriesQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01"), + DateTimes.of("2011-04-01"), new TimeseriesResultValue( ImmutableMap.of( "rows", 0L, @@ -1748,7 +1730,7 @@ public class TimeseriesQueryRunnerTest ) ), new Result<>( - new DateTime("2011-04-02"), + DateTimes.of("2011-04-02"), new TimeseriesResultValue( ImmutableMap.of( "rows", 0L, @@ -1786,11 +1768,7 @@ public class TimeseriesQueryRunnerTest Iterable> expectedResults = ImmutableList.of( new Result<>( - new DateTime( - QueryRunnerTestHelper.firstToThird.getIntervals() - .get(0) - .getStart() - ), + QueryRunnerTestHelper.firstToThird.getIntervals().get(0).getStart(), new TimeseriesResultValue( ImmutableMap.of( "index", 12459.361190795898d, @@ -1828,11 +1806,7 @@ public class TimeseriesQueryRunnerTest List> expectedResults = ImmutableList.of( new Result<>( - new DateTime( - QueryRunnerTestHelper.firstToThird.getIntervals() - .get(0) - .getStart() - ), + QueryRunnerTestHelper.firstToThird.getIntervals().get(0).getStart(), new TimeseriesResultValue( ImmutableMap.of( "index", 283.31103515625d, @@ -1871,7 +1845,7 @@ public class TimeseriesQueryRunnerTest // to select different value from the list of first and last dates List> expectedAscendingResults = ImmutableList.of( new Result<>( - new DateTime("2011-01-01"), + DateTimes.of("2011-01-01"), new TimeseriesResultValue( ImmutableMap.of( "first", new Float(100.000000).doubleValue(), @@ -1880,7 +1854,7 @@ public class TimeseriesQueryRunnerTest ) ), new Result<>( - new DateTime("2011-02-01"), + DateTimes.of("2011-02-01"), new TimeseriesResultValue( ImmutableMap.of( "first", new Float(132.123776).doubleValue(), @@ -1889,7 +1863,7 @@ public class TimeseriesQueryRunnerTest ) ), new Result<>( - new DateTime("2011-03-01"), + DateTimes.of("2011-03-01"), new TimeseriesResultValue( ImmutableMap.of( "first", new Float(153.059937).doubleValue(), @@ -1898,7 +1872,7 @@ public class TimeseriesQueryRunnerTest ) ), new Result<>( - new DateTime("2011-04-01"), + DateTimes.of("2011-04-01"), new TimeseriesResultValue( ImmutableMap.of( "first", new Float(135.885094).doubleValue(), @@ -1910,7 +1884,7 @@ public class TimeseriesQueryRunnerTest List> expectedDescendingResults = 
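Some hunks above simply drop a redundant wrapper: where the expected timestamp is taken from an interval that is already built in UTC (QueryRunnerTestHelper.firstToThird), the extra new DateTime(...) around getStart() goes away. A sketch with a stand-in interval (the actual firstToThird spec lives in QueryRunnerTestHelper and its dates are not shown in this diff):

    import io.druid.java.util.common.Intervals;

    import org.joda.time.DateTime;
    import org.joda.time.Interval;

    public class IntervalStart
    {
      public static void main(String[] args)
      {
        // Stand-in for the first-to-third interval used by the test helper.
        Interval firstToThird = Intervals.of("2011-04-01/2011-04-03");
        DateTime start = firstToThird.getStart(); // already UTC, no re-wrapping needed
        System.out.println(start);
      }
    }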
ImmutableList.of( new Result<>( - new DateTime("2011-04-01"), + DateTimes.of("2011-04-01"), new TimeseriesResultValue( ImmutableMap.of( "first", new Float(1234.247546).doubleValue(), @@ -1919,7 +1893,7 @@ public class TimeseriesQueryRunnerTest ) ), new Result<>( - new DateTime("2011-03-01"), + DateTimes.of("2011-03-01"), new TimeseriesResultValue( ImmutableMap.of( "first", new Float(1004.940887).doubleValue(), @@ -1928,7 +1902,7 @@ public class TimeseriesQueryRunnerTest ) ), new Result<>( - new DateTime("2011-02-01"), + DateTimes.of("2011-02-01"), new TimeseriesResultValue( ImmutableMap.of( "first", new Float(913.561076).doubleValue(), @@ -1937,7 +1911,7 @@ public class TimeseriesQueryRunnerTest ) ), new Result<>( - new DateTime("2011-01-01"), + DateTimes.of("2011-01-01"), new TimeseriesResultValue( ImmutableMap.of( "first", new Float(800.000000).doubleValue(), @@ -2191,7 +2165,7 @@ public class TimeseriesQueryRunnerTest ); List> expectedResults = Collections.singletonList( new Result<>( - new DateTime("2011-04-01"), + DateTimes.of("2011-04-01"), new TimeseriesResultValue( ImmutableMap.of( "filteredAgg", 18L, @@ -2242,7 +2216,7 @@ public class TimeseriesQueryRunnerTest List> expectedResults = Collections.singletonList( new Result<>( - new DateTime("2011-04-01"), + DateTimes.of("2011-04-01"), new TimeseriesResultValue( ImmutableMap.of( "filteredAgg", 0L, @@ -2293,7 +2267,7 @@ public class TimeseriesQueryRunnerTest List> expectedResults = Collections.singletonList( new Result<>( - new DateTime("2011-04-01"), + DateTimes.of("2011-04-01"), new TimeseriesResultValue( ImmutableMap.of( "filteredAgg", 26L, @@ -2345,7 +2319,7 @@ public class TimeseriesQueryRunnerTest ); List> expectedResults = Collections.singletonList( new Result<>( - new DateTime("2011-04-01"), + DateTimes.of("2011-04-01"), new TimeseriesResultValue( ImmutableMap.of( "filteredAgg", 26L, @@ -2397,7 +2371,7 @@ public class TimeseriesQueryRunnerTest ); List> expectedResults = Collections.singletonList( new Result<>( - new DateTime("2011-04-01"), + DateTimes.of("2011-04-01"), new TimeseriesResultValue( ImmutableMap.of( "filteredAgg", 26L, @@ -2432,7 +2406,7 @@ public class TimeseriesQueryRunnerTest List> expectedResults = Collections.singletonList( new Result<>( - new DateTime("2011-04-01"), + DateTimes.of("2011-04-01"), new TimeseriesResultValue( ImmutableMap.of( "rows", @@ -2510,7 +2484,7 @@ public class TimeseriesQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01"), + DateTimes.of("2011-04-01"), new TimeseriesResultValue( ImmutableMap.of( "rows", 9L, @@ -2521,7 +2495,7 @@ public class TimeseriesQueryRunnerTest ) ), new Result<>( - new DateTime("2011-04-02"), + DateTimes.of("2011-04-02"), new TimeseriesResultValue( ImmutableMap.of( "rows", 9L, @@ -2568,7 +2542,7 @@ public class TimeseriesQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01"), + DateTimes.of("2011-04-01"), new TimeseriesResultValue( ImmutableMap.of( "rows", 11L, @@ -2579,7 +2553,7 @@ public class TimeseriesQueryRunnerTest ) ), new Result<>( - new DateTime("2011-04-02"), + DateTimes.of("2011-04-02"), new TimeseriesResultValue( ImmutableMap.of( "rows", 11L, diff --git a/processing/src/test/java/io/druid/query/topn/TopNBinaryFnBenchmark.java b/processing/src/test/java/io/druid/query/topn/TopNBinaryFnBenchmark.java index edaa2cf6e8a..cd99923b4af 100644 --- a/processing/src/test/java/io/druid/query/topn/TopNBinaryFnBenchmark.java +++ 
b/processing/src/test/java/io/druid/query/topn/TopNBinaryFnBenchmark.java @@ -23,6 +23,7 @@ import com.google.caliper.Param; import com.google.caliper.Runner; import com.google.caliper.SimpleBenchmark; import com.google.common.collect.Lists; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.granularity.Granularities; import io.druid.query.Result; import io.druid.query.aggregation.AggregatorFactory; @@ -82,7 +83,7 @@ public class TopNBinaryFnBenchmark extends SimpleBenchmark ) ); } - final DateTime currTime = new DateTime(); + final DateTime currTime = DateTimes.nowUtc(); List> list = new ArrayList<>(); for (int i = 0; i < threshold; i++) { Map res = new HashMap<>(); diff --git a/processing/src/test/java/io/druid/query/topn/TopNBinaryFnTest.java b/processing/src/test/java/io/druid/query/topn/TopNBinaryFnTest.java index c45d2f62739..be0917b904c 100644 --- a/processing/src/test/java/io/druid/query/topn/TopNBinaryFnTest.java +++ b/processing/src/test/java/io/druid/query/topn/TopNBinaryFnTest.java @@ -22,6 +22,7 @@ package io.druid.query.topn; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.granularity.Granularities; import io.druid.query.Result; import io.druid.query.aggregation.AggregatorFactory; @@ -64,7 +65,7 @@ public class TopNBinaryFnTest final List postAggregators = Arrays.asList( addrowsindexconstant ); - private final DateTime currTime = new DateTime(); + private final DateTime currTime = DateTimes.nowUtc(); private void assertTopNMergeResult(Object o1, Object o2) { diff --git a/processing/src/test/java/io/druid/query/topn/TopNQueryQueryToolChestTest.java b/processing/src/test/java/io/druid/query/topn/TopNQueryQueryToolChestTest.java index 43a8dace166..e1d570bf1dd 100644 --- a/processing/src/test/java/io/druid/query/topn/TopNQueryQueryToolChestTest.java +++ b/processing/src/test/java/io/druid/query/topn/TopNQueryQueryToolChestTest.java @@ -23,6 +23,8 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Maps; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Sequence; import io.druid.query.CacheStrategy; @@ -45,8 +47,6 @@ import io.druid.segment.IncrementalIndexSegment; import io.druid.segment.TestHelper; import io.druid.segment.TestIndex; import io.druid.segment.VirtualColumns; -import org.joda.time.DateTime; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; @@ -69,13 +69,7 @@ public class TopNQueryQueryToolChestTest new DefaultDimensionSpec("test", "test"), new NumericTopNMetricSpec("metric1"), 3, - new MultipleIntervalSegmentSpec( - ImmutableList.of( - new Interval( - "2015-01-01/2015-01-02" - ) - ) - ), + new MultipleIntervalSegmentSpec(ImmutableList.of(Intervals.of("2015-01-01/2015-01-02"))), null, Granularities.ALL, ImmutableList.of(new CountAggregatorFactory("metric1")), @@ -86,7 +80,7 @@ public class TopNQueryQueryToolChestTest final Result result = new Result<>( // test timestamps that result in integer size millis - new DateTime(123L), + DateTimes.utc(123L), new TopNResultValue( Arrays.asList( ImmutableMap.of( @@ -121,13 +115,7 @@ public class TopNQueryQueryToolChestTest new 
DefaultDimensionSpec("test", "test"), new NumericTopNMetricSpec("post"), 3, - new MultipleIntervalSegmentSpec( - ImmutableList.of( - new Interval( - "2015-01-01/2015-01-02" - ) - ) - ), + new MultipleIntervalSegmentSpec(ImmutableList.of(Intervals.of("2015-01-01/2015-01-02"))), null, Granularities.ALL, ImmutableList.of(new CountAggregatorFactory("metric1")), @@ -141,13 +129,7 @@ public class TopNQueryQueryToolChestTest new DefaultDimensionSpec("test", "test"), new NumericTopNMetricSpec("post"), 3, - new MultipleIntervalSegmentSpec( - ImmutableList.of( - new Interval( - "2015-01-01/2015-01-02" - ) - ) - ), + new MultipleIntervalSegmentSpec(ImmutableList.of(Intervals.of("2015-01-01/2015-01-02"))), null, Granularities.ALL, ImmutableList.of(new CountAggregatorFactory("metric1")), diff --git a/processing/src/test/java/io/druid/query/topn/TopNQueryRunnerTest.java b/processing/src/test/java/io/druid/query/topn/TopNQueryRunnerTest.java index 112522f8895..13c83489629 100644 --- a/processing/src/test/java/io/druid/query/topn/TopNQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/topn/TopNQueryRunnerTest.java @@ -31,8 +31,10 @@ import com.google.common.collect.Sets; import com.google.common.primitives.Doubles; import com.google.common.primitives.Longs; import io.druid.collections.StupidPool; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.IAE; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.Pair; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.granularity.Granularity; @@ -92,8 +94,6 @@ import io.druid.segment.TestHelper; import io.druid.segment.column.Column; import io.druid.segment.column.ValueType; import io.druid.segment.virtual.ExpressionVirtualColumn; -import org.joda.time.DateTime; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Rule; import org.junit.Test; @@ -298,7 +298,7 @@ public class TopNQueryRunnerTest List> expectedResults = ImmutableList.of( new Result<>( - new DateTime("2020-04-02T00:00:00.000Z"), + DateTimes.of("2020-04-02T00:00:00.000Z"), new TopNResultValue(ImmutableList.of()) ) ); @@ -331,7 +331,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -403,7 +403,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -475,7 +475,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -538,7 +538,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -592,7 +592,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -637,7 +637,7 @@ public class TopNQueryRunnerTest List> expectedResults = 
Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -687,7 +687,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -732,7 +732,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-01-01T00:00:00.000Z"), + DateTimes.of("2011-01-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -754,7 +754,7 @@ public class TopNQueryRunnerTest ) ), new Result<>( - new DateTime("2011-02-01T00:00:00.000Z"), + DateTimes.of("2011-02-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -776,7 +776,7 @@ public class TopNQueryRunnerTest ) ), new Result<>( - new DateTime("2011-03-01T00:00:00.000Z"), + DateTimes.of("2011-03-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -798,7 +798,7 @@ public class TopNQueryRunnerTest ) ), new Result<>( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -844,7 +844,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-01-01T00:00:00.000Z"), + DateTimes.of("2011-01-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -866,7 +866,7 @@ public class TopNQueryRunnerTest ) ), new Result<>( - new DateTime("2011-02-01T00:00:00.000Z"), + DateTimes.of("2011-02-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -888,7 +888,7 @@ public class TopNQueryRunnerTest ) ), new Result<>( - new DateTime("2011-03-01T00:00:00.000Z"), + DateTimes.of("2011-03-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -910,7 +910,7 @@ public class TopNQueryRunnerTest ) ), new Result<>( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -957,7 +957,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-01-01T00:00:00.000Z"), + DateTimes.of("2011-01-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -979,7 +979,7 @@ public class TopNQueryRunnerTest ) ), new Result<>( - new DateTime("2011-02-01T00:00:00.000Z"), + DateTimes.of("2011-02-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -1001,7 +1001,7 @@ public class TopNQueryRunnerTest ) ), new Result<>( - new DateTime("2011-03-01T00:00:00.000Z"), + DateTimes.of("2011-03-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -1023,7 +1023,7 @@ public class TopNQueryRunnerTest ) ), new Result<>( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -1068,7 +1068,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-01-01T00:00:00.000Z"), + DateTimes.of("2011-01-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -1090,7 +1090,7 @@ public class TopNQueryRunnerTest ) ), new Result<>( - new DateTime("2011-02-01T00:00:00.000Z"), + 
DateTimes.of("2011-02-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -1112,7 +1112,7 @@ public class TopNQueryRunnerTest ) ), new Result<>( - new DateTime("2011-03-01T00:00:00.000Z"), + DateTimes.of("2011-03-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -1134,7 +1134,7 @@ public class TopNQueryRunnerTest ) ), new Result<>( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -1182,7 +1182,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -1287,7 +1287,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -1335,7 +1335,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -1383,7 +1383,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -1431,7 +1431,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -1472,7 +1472,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -1506,7 +1506,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -1549,7 +1549,7 @@ public class TopNQueryRunnerTest .threshold(4) .intervals( new MultipleIntervalSegmentSpec( - Arrays.asList(new Interval("2011-04-01T00:00:00.000Z/2011-04-02T00:00:00.000Z")) + Arrays.asList(Intervals.of("2011-04-01T00:00:00.000Z/2011-04-02T00:00:00.000Z")) ) ) .aggregators(commonAggregators) @@ -1558,7 +1558,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -1606,7 +1606,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -1648,7 +1648,7 @@ public class TopNQueryRunnerTest assertExpectedResults( Lists.>newArrayList( new Result( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue(Lists.>newArrayList()) ) ), query @@ -1685,7 +1685,7 @@ public class TopNQueryRunnerTest assertExpectedResults( Lists.>newArrayList( new Result( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new 
TopNResultValue(Lists.>newArrayList()) ) ), query @@ -1777,7 +1777,7 @@ public class TopNQueryRunnerTest final ArrayList> expectedResults = Lists.newArrayList( new Result<>( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -1818,7 +1818,7 @@ public class TopNQueryRunnerTest final ArrayList> expectedResults = Lists.newArrayList( new Result<>( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -1866,7 +1866,7 @@ public class TopNQueryRunnerTest final ArrayList> expectedResults = Lists.newArrayList( new Result<>( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -1920,7 +1920,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Collections.>singletonList( QueryRunnerTestHelper.orderedMap( @@ -1954,7 +1954,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Collections.>singletonList( QueryRunnerTestHelper.orderedMap( @@ -1988,7 +1988,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Collections.>singletonList( QueryRunnerTestHelper.orderedMap( @@ -2021,7 +2021,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -2066,7 +2066,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -2101,7 +2101,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -2141,7 +2141,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -2181,7 +2181,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -2221,7 +2221,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -2270,7 +2270,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -2319,7 +2319,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new 
DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -2384,7 +2384,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -2435,7 +2435,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -2486,7 +2486,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -2550,7 +2550,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -2615,7 +2615,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -2682,7 +2682,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -2748,7 +2748,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -2814,7 +2814,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -2881,7 +2881,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -2934,7 +2934,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -2987,7 +2987,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -3040,7 +3040,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -3110,7 +3110,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -3157,7 +3157,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-04-01T00:00:00.000Z"), + 
DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -3203,7 +3203,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -3277,7 +3277,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -3365,7 +3365,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -3414,7 +3414,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -3468,7 +3468,7 @@ public class TopNQueryRunnerTest List> expectedResults = Collections.singletonList( new Result<>( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( withDuplicateResults( Arrays.>asList( @@ -3525,7 +3525,7 @@ public class TopNQueryRunnerTest List> expectedResults = Collections.singletonList( new Result<>( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( withDuplicateResults( Collections.singletonList( @@ -3575,7 +3575,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -3693,16 +3693,16 @@ public class TopNQueryRunnerTest List> expectedResults = Collections.singletonList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new BySegmentResultValueClass( Collections.singletonList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), topNResult ) ), QueryRunnerTestHelper.segmentId, - new Interval("1970-01-01T00:00:00.000Z/2020-01-01T00:00:00.000Z") + Intervals.of("1970-01-01T00:00:00.000Z/2020-01-01T00:00:00.000Z") ) ) ); @@ -3733,7 +3733,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01"), + DateTimes.of("2011-04-01"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -3797,7 +3797,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -3853,7 +3853,7 @@ public class TopNQueryRunnerTest map.put("minIndex", 59.02102279663086D); List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.asList( map @@ -3901,7 +3901,7 @@ public class TopNQueryRunnerTest map.put("minIndex", 59.02102279663086D); List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.asList( map @@ -3932,7 +3932,7 @@ public class TopNQueryRunnerTest map.put("uniques", 
QueryRunnerTestHelper.UNIQUES_9); List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.asList( map, @@ -3970,7 +3970,7 @@ public class TopNQueryRunnerTest map.put("uniques", QueryRunnerTestHelper.UNIQUES_9); List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.asList( map @@ -3997,7 +3997,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.asList( ImmutableMap.of( @@ -4030,7 +4030,7 @@ public class TopNQueryRunnerTest .build(); List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-02T00:00:00.000Z"), + DateTimes.of("2011-04-02T00:00:00.000Z"), new TopNResultValue( withDuplicateResults( Arrays.asList( @@ -4069,7 +4069,7 @@ public class TopNQueryRunnerTest .build(); List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-02T00:00:00.000Z"), + DateTimes.of("2011-04-02T00:00:00.000Z"), new TopNResultValue( withDuplicateResults( Arrays.asList( @@ -4120,7 +4120,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -4185,7 +4185,7 @@ public class TopNQueryRunnerTest map.put("minIndex", 59.02102279663086D); List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.asList( map @@ -4250,7 +4250,7 @@ public class TopNQueryRunnerTest map.put("minIndex", 59.02102279663086D); List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.asList( map @@ -4290,7 +4290,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -4365,7 +4365,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -4437,7 +4437,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -4509,7 +4509,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -4582,7 +4582,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -4652,7 +4652,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), 
new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -4712,7 +4712,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -4784,7 +4784,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -4856,7 +4856,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -4928,7 +4928,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -5000,7 +5000,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -5075,7 +5075,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -5163,7 +5163,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( expectedMap @@ -5202,7 +5202,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -5280,7 +5280,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -5349,7 +5349,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -5476,7 +5476,7 @@ public class TopNQueryRunnerTest rows.sort((r1, r2) -> ((Comparable) r2.get(metric)).compareTo(r1.get(metric))); List> expectedResults = Collections.singletonList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue(rows) ) ); @@ -5513,7 +5513,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue(Arrays.asList()) ) ); diff --git a/processing/src/test/java/io/druid/query/topn/TopNQueryRunnerTestHelper.java b/processing/src/test/java/io/druid/query/topn/TopNQueryRunnerTestHelper.java index 35d541486c1..533c6709e37 100644 --- a/processing/src/test/java/io/druid/query/topn/TopNQueryRunnerTestHelper.java +++ b/processing/src/test/java/io/druid/query/topn/TopNQueryRunnerTestHelper.java @@ -22,8 +22,8 @@ package io.druid.query.topn; 
import com.google.common.base.Preconditions; import com.google.common.collect.Lists; import com.google.common.collect.Maps; +import io.druid.java.util.common.DateTimes; import io.druid.query.Result; -import org.joda.time.DateTime; import java.util.List; import java.util.Map; @@ -43,6 +43,6 @@ public class TopNQueryRunnerTestHelper } expected.add(theVals); } - return new Result(new DateTime(date), new TopNResultValue(expected)); + return new Result(DateTimes.of(date), new TopNResultValue(expected)); } } diff --git a/processing/src/test/java/io/druid/query/topn/TopNUnionQueryTest.java b/processing/src/test/java/io/druid/query/topn/TopNUnionQueryTest.java index 6758d2d9ecc..4d1804a3568 100644 --- a/processing/src/test/java/io/druid/query/topn/TopNUnionQueryTest.java +++ b/processing/src/test/java/io/druid/query/topn/TopNUnionQueryTest.java @@ -24,6 +24,7 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import io.druid.collections.StupidPool; +import io.druid.java.util.common.DateTimes; import io.druid.query.QueryPlus; import io.druid.query.QueryRunner; import io.druid.query.QueryRunnerTestHelper; @@ -34,7 +35,6 @@ import io.druid.query.aggregation.DoubleMaxAggregatorFactory; import io.druid.query.aggregation.DoubleMinAggregatorFactory; import io.druid.query.aggregation.PostAggregator; import io.druid.segment.TestHelper; -import org.joda.time.DateTime; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; @@ -131,7 +131,7 @@ public class TopNUnionQueryTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() diff --git a/processing/src/test/java/io/druid/segment/AppendTest.java b/processing/src/test/java/io/druid/segment/AppendTest.java index b1365002263..73ff071087b 100644 --- a/processing/src/test/java/io/druid/segment/AppendTest.java +++ b/processing/src/test/java/io/druid/segment/AppendTest.java @@ -22,6 +22,8 @@ package io.druid.segment; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.Pair; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.granularity.Granularity; @@ -54,8 +56,6 @@ import io.druid.query.timeseries.TimeseriesResultValue; import io.druid.query.topn.TopNQuery; import io.druid.query.topn.TopNQueryBuilder; import io.druid.query.topn.TopNResultValue; -import org.joda.time.DateTime; -import org.joda.time.Interval; import org.junit.Before; import org.junit.Ignore; import org.junit.Test; @@ -102,7 +102,7 @@ public class AppendTest final List commonAggregators = Arrays.asList(rowsCount, indexDoubleSum, uniques); final QuerySegmentSpec fullOnInterval = new MultipleIntervalSegmentSpec( - Arrays.asList(new Interval("1970-01-01T00:00:00.000Z/2020-01-01T00:00:00.000Z")) + Arrays.asList(Intervals.of("1970-01-01T00:00:00.000Z/2020-01-01T00:00:00.000Z")) ); private Segment segment; @@ -121,8 +121,8 @@ public class AppendTest new Pair("append.json.2", METRIC_AGGS) ), Arrays.asList( - new Interval("2011-01-12T00:00:00.000Z/2011-01-16T00:00:00.000Z"), - new Interval("2011-01-14T22:00:00.000Z/2011-01-16T00:00:00.000Z") + 
Intervals.of("2011-01-12T00:00:00.000Z/2011-01-16T00:00:00.000Z"), + Intervals.of("2011-01-14T22:00:00.000Z/2011-01-16T00:00:00.000Z") ) ); segment = new QueryableIndexSegment(null, appendedIndex); @@ -136,8 +136,8 @@ public class AppendTest new Pair("append.json.4", METRIC_AGGS) ), Arrays.asList( - new Interval("2011-01-12T00:00:00.000Z/2011-01-16T00:00:00.000Z"), - new Interval("2011-01-13T00:00:00.000Z/2011-01-14T00:00:00.000Z") + Intervals.of("2011-01-12T00:00:00.000Z/2011-01-16T00:00:00.000Z"), + Intervals.of("2011-01-13T00:00:00.000Z/2011-01-14T00:00:00.000Z") ) ); segment2 = new QueryableIndexSegment(null, append2); @@ -153,9 +153,9 @@ public class AppendTest new Pair("append.json.7", METRIC_AGGS) ), Arrays.asList( - new Interval("2011-01-12T00:00:00.000Z/2011-01-22T00:00:00.000Z"), - new Interval("2011-01-13T00:00:00.000Z/2011-01-16T00:00:00.000Z"), - new Interval("2011-01-18T00:00:00.000Z/2011-01-21T00:00:00.000Z") + Intervals.of("2011-01-12T00:00:00.000Z/2011-01-22T00:00:00.000Z"), + Intervals.of("2011-01-13T00:00:00.000Z/2011-01-16T00:00:00.000Z"), + Intervals.of("2011-01-18T00:00:00.000Z/2011-01-21T00:00:00.000Z") ) ); segment3 = new QueryableIndexSegment(null, append3); @@ -166,13 +166,13 @@ public class AppendTest { List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeBoundaryResultValue( ImmutableMap.of( TimeBoundaryQuery.MIN_TIME, - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), TimeBoundaryQuery.MAX_TIME, - new DateTime("2011-01-15T02:00:00.000Z") + DateTimes.of("2011-01-15T02:00:00.000Z") ) ) ) @@ -191,13 +191,13 @@ public class AppendTest { List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeBoundaryResultValue( ImmutableMap.of( TimeBoundaryQuery.MIN_TIME, - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), TimeBoundaryQuery.MAX_TIME, - new DateTime("2011-01-15T00:00:00.000Z") + DateTimes.of("2011-01-15T00:00:00.000Z") ) ) ) @@ -216,7 +216,7 @@ public class AppendTest { List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 8L) @@ -241,7 +241,7 @@ public class AppendTest { List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 7L) @@ -266,7 +266,7 @@ public class AppendTest { List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 5L) @@ -291,7 +291,7 @@ public class AppendTest { List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 4L) @@ -316,7 +316,7 @@ public class AppendTest { List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -362,7 +362,7 @@ public class AppendTest { List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + 
DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -408,7 +408,7 @@ public class AppendTest { List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -436,7 +436,7 @@ public class AppendTest { List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Lists.>newArrayList() ) @@ -454,7 +454,7 @@ public class AppendTest { List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new SearchResultValue( Arrays.asList( new SearchHit(placementishDimension, "a"), @@ -477,7 +477,7 @@ public class AppendTest { List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new SearchResultValue( Arrays.asList( new SearchHit(placementishDimension, "a"), @@ -499,7 +499,7 @@ public class AppendTest { List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new SearchResultValue( Arrays.asList( new SearchHit(placementDimension, "mezzanine"), @@ -520,7 +520,7 @@ public class AppendTest { List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new SearchResultValue( Arrays.asList( new SearchHit(placementishDimension, "a"), @@ -542,7 +542,7 @@ public class AppendTest { List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 5L) diff --git a/processing/src/test/java/io/druid/segment/EmptyIndexTest.java b/processing/src/test/java/io/druid/segment/EmptyIndexTest.java index 47c5d888b1c..99654c6ff18 100644 --- a/processing/src/test/java/io/druid/segment/EmptyIndexTest.java +++ b/processing/src/test/java/io/druid/segment/EmptyIndexTest.java @@ -22,12 +22,12 @@ package io.druid.segment; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import io.druid.collections.bitmap.ConciseBitmapFactory; +import io.druid.java.util.common.Intervals; import io.druid.query.aggregation.AggregatorFactory; import io.druid.segment.column.Column; import io.druid.segment.incremental.IncrementalIndex; import io.druid.segment.incremental.IncrementalIndexAdapter; import org.apache.commons.io.FileUtils; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; @@ -53,7 +53,7 @@ public class EmptyIndexTest .buildOnheap(); IncrementalIndexAdapter emptyIndexAdapter = new IncrementalIndexAdapter( - new Interval("2012-08-01/P3D"), + Intervals.of("2012-08-01/P3D"), emptyIndex, new ConciseBitmapFactory() ); @@ -69,7 +69,7 @@ public class EmptyIndexTest Assert.assertEquals("getDimensionNames", 0, Iterables.size(emptyQueryableIndex.getAvailableDimensions())); Assert.assertEquals("getMetricNames", 0, Iterables.size(emptyQueryableIndex.getColumnNames())); - Assert.assertEquals("getDataInterval", new Interval("2012-08-01/P3D"), emptyQueryableIndex.getDataInterval()); + Assert.assertEquals("getDataInterval", Intervals.of("2012-08-01/P3D"), emptyQueryableIndex.getDataInterval()); Assert.assertEquals( "getReadOnlyTimestamps", 0, diff --git 
a/processing/src/test/java/io/druid/segment/IndexIOTest.java b/processing/src/test/java/io/druid/segment/IndexIOTest.java index ac8289cd7fe..d4470ee9dcf 100644 --- a/processing/src/test/java/io/druid/segment/IndexIOTest.java +++ b/processing/src/test/java/io/druid/segment/IndexIOTest.java @@ -29,6 +29,7 @@ import com.google.common.collect.Lists; import com.google.common.collect.Maps; import io.druid.data.input.MapBasedInputRow; import io.druid.data.input.impl.DimensionsSpec; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.UOE; import io.druid.query.aggregation.Aggregator; import io.druid.query.aggregation.CountAggregatorFactory; @@ -62,7 +63,7 @@ import java.util.Map; @RunWith(Parameterized.class) public class IndexIOTest { - private static Interval DEFAULT_INTERVAL = Interval.parse("1970-01-01/2000-01-01"); + private static Interval DEFAULT_INTERVAL = Intervals.of("1970-01-01/2000-01-01"); private static final IndexSpec INDEX_SPEC = IndexMergerTestBase.makeIndexSpec( new ConciseBitmapSerdeFactory(), CompressedObjectStrategy.CompressionStrategy.LZ4, diff --git a/processing/src/test/java/io/druid/segment/IndexMergerTestBase.java b/processing/src/test/java/io/druid/segment/IndexMergerTestBase.java index c6142afb3bb..f5196d062a5 100644 --- a/processing/src/test/java/io/druid/segment/IndexMergerTestBase.java +++ b/processing/src/test/java/io/druid/segment/IndexMergerTestBase.java @@ -36,6 +36,7 @@ import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.FloatDimensionSchema; import io.druid.data.input.impl.LongDimensionSchema; import io.druid.data.input.impl.StringDimensionSchema; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.IAE; import io.druid.java.util.common.ISE; import io.druid.java.util.common.granularity.Granularities; @@ -56,7 +57,6 @@ import io.druid.segment.incremental.IncrementalIndex; import io.druid.segment.incremental.IncrementalIndexAdapter; import io.druid.segment.incremental.IncrementalIndexSchema; import io.druid.segment.incremental.IndexSizeExceededException; -import org.joda.time.DateTime; import org.joda.time.Interval; import org.junit.Assert; import org.junit.Rule; @@ -1746,7 +1746,7 @@ public class IndexMergerTestBase )); closer.closeLater(index2); - Interval interval = new Interval(0, new DateTime().getMillis()); + Interval interval = new Interval(DateTimes.EPOCH, DateTimes.nowUtc()); RoaringBitmapFactory factory = new RoaringBitmapFactory(); ArrayList toMerge = Lists.newArrayList( new IncrementalIndexAdapter(interval, index1, factory), @@ -1797,7 +1797,7 @@ public class IndexMergerTestBase ); closer.closeLater(index2); - Interval interval = new Interval(0, new DateTime().getMillis()); + Interval interval = new Interval(DateTimes.EPOCH, DateTimes.nowUtc()); RoaringBitmapFactory factory = new RoaringBitmapFactory(); ArrayList toMerge = Lists.newArrayList( new IncrementalIndexAdapter(interval, index1, factory), @@ -1867,7 +1867,7 @@ public class IndexMergerTestBase ); - Interval interval = new Interval(0, new DateTime().getMillis()); + Interval interval = new Interval(DateTimes.EPOCH, DateTimes.nowUtc()); RoaringBitmapFactory factory = new RoaringBitmapFactory(); ArrayList toMerge = Lists.newArrayList( new IncrementalIndexAdapter(interval, index1, factory), @@ -1927,7 +1927,7 @@ public class IndexMergerTestBase closer.closeLater(index5); - Interval interval = new Interval(0, new DateTime().getMillis()); + Interval interval = new Interval(DateTimes.EPOCH, DateTimes.nowUtc()); 
RoaringBitmapFactory factory = new RoaringBitmapFactory(); ArrayList toMerge = Lists.newArrayList( new IncrementalIndexAdapter(interval, index1, factory), @@ -1976,7 +1976,7 @@ public class IndexMergerTestBase closer.closeLater(index5); - Interval interval = new Interval(0, new DateTime().getMillis()); + Interval interval = new Interval(DateTimes.EPOCH, DateTimes.nowUtc()); RoaringBitmapFactory factory = new RoaringBitmapFactory(); ArrayList toMerge = Lists.newArrayList( new IncrementalIndexAdapter(interval, index2, factory) diff --git a/processing/src/test/java/io/druid/segment/IndexMergerV9CompatibilityTest.java b/processing/src/test/java/io/druid/segment/IndexMergerV9CompatibilityTest.java index 7091817e617..f116a2c022a 100644 --- a/processing/src/test/java/io/druid/segment/IndexMergerV9CompatibilityTest.java +++ b/processing/src/test/java/io/druid/segment/IndexMergerV9CompatibilityTest.java @@ -23,7 +23,7 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.io.ByteSource; import com.google.common.io.Files; -import io.druid.common.utils.JodaUtils; +import io.druid.java.util.common.JodaUtils; import io.druid.data.input.InputRow; import io.druid.data.input.MapBasedInputRow; import io.druid.query.aggregation.AggregatorFactory; diff --git a/processing/src/test/java/io/druid/segment/IndexMergerV9WithSpatialIndexTest.java b/processing/src/test/java/io/druid/segment/IndexMergerV9WithSpatialIndexTest.java index e4218468ed3..9c17dc9078d 100644 --- a/processing/src/test/java/io/druid/segment/IndexMergerV9WithSpatialIndexTest.java +++ b/processing/src/test/java/io/druid/segment/IndexMergerV9WithSpatialIndexTest.java @@ -28,6 +28,8 @@ import io.druid.collections.spatial.search.RectangularBound; import io.druid.data.input.MapBasedInputRow; import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.SpatialDimensionSchema; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.granularity.Granularities; import io.druid.query.Druids; import io.druid.query.FinalizeResultsQueryRunner; @@ -47,7 +49,6 @@ import io.druid.query.timeseries.TimeseriesResultValue; import io.druid.segment.incremental.IncrementalIndex; import io.druid.segment.incremental.IncrementalIndexSchema; import org.apache.commons.io.FileUtils; -import org.joda.time.DateTime; import org.joda.time.Interval; import org.junit.Test; import org.junit.runner.RunWith; @@ -69,7 +70,7 @@ public class IndexMergerV9WithSpatialIndexTest private static IndexIO INDEX_IO = TestHelper.getTestIndexIO(); public static final int NUM_POINTS = 5000; - private static Interval DATA_INTERVAL = new Interval("2013-01-01/2013-01-07"); + private static Interval DATA_INTERVAL = Intervals.of("2013-01-01/2013-01-07"); private static AggregatorFactory[] METRIC_AGGS = new AggregatorFactory[]{ new CountAggregatorFactory("rows"), @@ -132,10 +133,10 @@ public class IndexMergerV9WithSpatialIndexTest theIndex.add( new MapBasedInputRow( - new DateTime("2013-01-01").getMillis(), + DateTimes.of("2013-01-01").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-01").toString(), + "timestamp", DateTimes.of("2013-01-01").toString(), "dim", "foo", "lat", 0.0f, "long", 0.0f, @@ -145,10 +146,10 @@ public class IndexMergerV9WithSpatialIndexTest ); theIndex.add( new MapBasedInputRow( - new DateTime("2013-01-02").getMillis(), + DateTimes.of("2013-01-02").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new 
DateTime("2013-01-02").toString(), + "timestamp", DateTimes.of("2013-01-02").toString(), "dim", "foo", "lat", 1.0f, "long", 3.0f, @@ -158,10 +159,10 @@ public class IndexMergerV9WithSpatialIndexTest ); theIndex.add( new MapBasedInputRow( - new DateTime("2013-01-03").getMillis(), + DateTimes.of("2013-01-03").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-03").toString(), + "timestamp", DateTimes.of("2013-01-03").toString(), "dim", "foo", "lat", 4.0f, "long", 2.0f, @@ -171,10 +172,10 @@ public class IndexMergerV9WithSpatialIndexTest ); theIndex.add( new MapBasedInputRow( - new DateTime("2013-01-04").getMillis(), + DateTimes.of("2013-01-04").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-04").toString(), + "timestamp", DateTimes.of("2013-01-04").toString(), "dim", "foo", "lat", 7.0f, "long", 3.0f, @@ -184,10 +185,10 @@ public class IndexMergerV9WithSpatialIndexTest ); theIndex.add( new MapBasedInputRow( - new DateTime("2013-01-05").getMillis(), + DateTimes.of("2013-01-05").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-05").toString(), + "timestamp", DateTimes.of("2013-01-05").toString(), "dim", "foo", "lat", 8.0f, "long", 6.0f, @@ -197,10 +198,10 @@ public class IndexMergerV9WithSpatialIndexTest ); theIndex.add( new MapBasedInputRow( - new DateTime("2013-01-05").getMillis(), + DateTimes.of("2013-01-05").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-05").toString(), + "timestamp", DateTimes.of("2013-01-05").toString(), "dim", "foo", "lat", "_mmx.unknown", "long", "_mmx.unknown", @@ -210,10 +211,10 @@ public class IndexMergerV9WithSpatialIndexTest ); theIndex.add( new MapBasedInputRow( - new DateTime("2013-01-05").getMillis(), + DateTimes.of("2013-01-05").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-05").toString(), + "timestamp", DateTimes.of("2013-01-05").toString(), "dim", "foo", "dim.geo", "_mmx.unknown", "val", 501L @@ -222,10 +223,10 @@ public class IndexMergerV9WithSpatialIndexTest ); theIndex.add( new MapBasedInputRow( - new DateTime("2013-01-05").getMillis(), + DateTimes.of("2013-01-05").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-05").toString(), + "timestamp", DateTimes.of("2013-01-05").toString(), "lat2", 0.0f, "long2", 0.0f, "val", 13L @@ -238,10 +239,10 @@ public class IndexMergerV9WithSpatialIndexTest for (int i = 8; i < NUM_POINTS; i++) { theIndex.add( new MapBasedInputRow( - new DateTime("2013-01-01").getMillis(), + DateTimes.of("2013-01-01").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-01").toString(), + "timestamp", DateTimes.of("2013-01-01").toString(), "dim", "boo", "lat", (float) (rand.nextFloat() * 10 + 10.0), "long", (float) (rand.nextFloat() * 10 + 10.0), @@ -359,10 +360,10 @@ public class IndexMergerV9WithSpatialIndexTest first.add( new MapBasedInputRow( - new DateTime("2013-01-01").getMillis(), + DateTimes.of("2013-01-01").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-01").toString(), + "timestamp", DateTimes.of("2013-01-01").toString(), "dim", "foo", "lat", 0.0f, "long", 0.0f, @@ -372,10 +373,10 @@ public class IndexMergerV9WithSpatialIndexTest ); first.add( new MapBasedInputRow( - new DateTime("2013-01-02").getMillis(), + DateTimes.of("2013-01-02").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-02").toString(), + "timestamp", DateTimes.of("2013-01-02").toString(), "dim", "foo", "lat", 1.0f, "long", 3.0f, @@ -385,10 +386,10 @@ 
public class IndexMergerV9WithSpatialIndexTest ); first.add( new MapBasedInputRow( - new DateTime("2013-01-03").getMillis(), + DateTimes.of("2013-01-03").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-03").toString(), + "timestamp", DateTimes.of("2013-01-03").toString(), "dim", "foo", "lat", 4.0f, "long", 2.0f, @@ -398,10 +399,10 @@ public class IndexMergerV9WithSpatialIndexTest ); first.add( new MapBasedInputRow( - new DateTime("2013-01-05").getMillis(), + DateTimes.of("2013-01-05").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-05").toString(), + "timestamp", DateTimes.of("2013-01-05").toString(), "dim", "foo", "lat", "_mmx.unknown", "long", "_mmx.unknown", @@ -411,10 +412,10 @@ public class IndexMergerV9WithSpatialIndexTest ); first.add( new MapBasedInputRow( - new DateTime("2013-01-05").getMillis(), + DateTimes.of("2013-01-05").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-05").toString(), + "timestamp", DateTimes.of("2013-01-05").toString(), "dim", "foo", "dim.geo", "_mmx.unknown", "val", 501L @@ -423,10 +424,10 @@ public class IndexMergerV9WithSpatialIndexTest ); second.add( new MapBasedInputRow( - new DateTime("2013-01-04").getMillis(), + DateTimes.of("2013-01-04").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-04").toString(), + "timestamp", DateTimes.of("2013-01-04").toString(), "dim", "foo", "lat", 7.0f, "long", 3.0f, @@ -436,10 +437,10 @@ public class IndexMergerV9WithSpatialIndexTest ); second.add( new MapBasedInputRow( - new DateTime("2013-01-05").getMillis(), + DateTimes.of("2013-01-05").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-05").toString(), + "timestamp", DateTimes.of("2013-01-05").toString(), "dim", "foo", "lat", 8.0f, "long", 6.0f, @@ -449,10 +450,10 @@ public class IndexMergerV9WithSpatialIndexTest ); second.add( new MapBasedInputRow( - new DateTime("2013-01-05").getMillis(), + DateTimes.of("2013-01-05").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-05").toString(), + "timestamp", DateTimes.of("2013-01-05").toString(), "lat2", 0.0f, "long2", 0.0f, "val", 13L @@ -465,10 +466,10 @@ public class IndexMergerV9WithSpatialIndexTest for (int i = 8; i < NUM_POINTS; i++) { third.add( new MapBasedInputRow( - new DateTime("2013-01-01").getMillis(), + DateTimes.of("2013-01-01").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-01").toString(), + "timestamp", DateTimes.of("2013-01-01").toString(), "dim", "boo", "lat", (float) (rand.nextFloat() * 10 + 10.0), "long", (float) (rand.nextFloat() * 10 + 10.0), @@ -539,7 +540,7 @@ public class IndexMergerV9WithSpatialIndexTest TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() .dataSource("test") .granularity(Granularities.ALL) - .intervals(Arrays.asList(new Interval("2013-01-01/2013-01-07"))) + .intervals(Arrays.asList(Intervals.of("2013-01-01/2013-01-07"))) .filters( new SpatialDimFilter( "dim.geo", @@ -556,7 +557,7 @@ public class IndexMergerV9WithSpatialIndexTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2013-01-01T00:00:00.000Z"), + DateTimes.of("2013-01-01T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 3L) @@ -593,7 +594,7 @@ public class IndexMergerV9WithSpatialIndexTest TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() .dataSource("test") .granularity(Granularities.ALL) - .intervals(Arrays.asList(new Interval("2013-01-01/2013-01-07"))) + 
.intervals(Arrays.asList(Intervals.of("2013-01-01/2013-01-07"))) .filters( new SpatialDimFilter( "spatialIsRad", @@ -610,7 +611,7 @@ public class IndexMergerV9WithSpatialIndexTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2013-01-01T00:00:00.000Z"), + DateTimes.of("2013-01-01T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 1L) @@ -646,7 +647,7 @@ public class IndexMergerV9WithSpatialIndexTest TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() .dataSource("test") .granularity(Granularities.DAY) - .intervals(Arrays.asList(new Interval("2013-01-01/2013-01-07"))) + .intervals(Arrays.asList(Intervals.of("2013-01-01/2013-01-07"))) .filters( new SpatialDimFilter( "dim.geo", @@ -663,7 +664,7 @@ public class IndexMergerV9WithSpatialIndexTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2013-01-01T00:00:00.000Z"), + DateTimes.of("2013-01-01T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 1L) @@ -672,7 +673,7 @@ public class IndexMergerV9WithSpatialIndexTest ) ), new Result<>( - new DateTime("2013-01-02T00:00:00.000Z"), + DateTimes.of("2013-01-02T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 1L) @@ -681,7 +682,7 @@ public class IndexMergerV9WithSpatialIndexTest ) ), new Result<>( - new DateTime("2013-01-03T00:00:00.000Z"), + DateTimes.of("2013-01-03T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 1L) @@ -690,7 +691,7 @@ public class IndexMergerV9WithSpatialIndexTest ) ), new Result<>( - new DateTime("2013-01-04T00:00:00.000Z"), + DateTimes.of("2013-01-04T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 1L) @@ -699,7 +700,7 @@ public class IndexMergerV9WithSpatialIndexTest ) ), new Result<>( - new DateTime("2013-01-05T00:00:00.000Z"), + DateTimes.of("2013-01-05T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 1L) diff --git a/processing/src/test/java/io/druid/segment/ReferenceCountingSegmentTest.java b/processing/src/test/java/io/druid/segment/ReferenceCountingSegmentTest.java index ac66b6c4c3d..724bcb41686 100644 --- a/processing/src/test/java/io/druid/segment/ReferenceCountingSegmentTest.java +++ b/processing/src/test/java/io/druid/segment/ReferenceCountingSegmentTest.java @@ -20,7 +20,7 @@ package io.druid.segment; import com.google.common.base.Throwables; -import org.joda.time.DateTime; +import io.druid.java.util.common.DateTimes; import org.joda.time.Days; import org.joda.time.Interval; import org.junit.Assert; @@ -54,7 +54,7 @@ public class ReferenceCountingSegmentTest @Override public Interval getDataInterval() { - return new Interval(DateTime.now().minus(Days.days(1)), DateTime.now()); + return new Interval(DateTimes.nowUtc().minus(Days.days(1)), DateTimes.nowUtc()); } @Override diff --git a/processing/src/test/java/io/druid/segment/SchemalessIndexTest.java b/processing/src/test/java/io/druid/segment/SchemalessIndexTest.java index d5fda9e46ab..ca2caca63f2 100644 --- a/processing/src/test/java/io/druid/segment/SchemalessIndexTest.java +++ b/processing/src/test/java/io/druid/segment/SchemalessIndexTest.java @@ -29,6 +29,8 @@ import com.google.common.collect.Maps; import io.druid.data.input.MapBasedInputRow; import io.druid.hll.HyperLogLogHash; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.Pair; import 
io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Comparators; @@ -49,6 +51,7 @@ import io.druid.timeline.partition.PartitionChunk; import io.druid.timeline.partition.ShardSpec; import org.joda.time.DateTime; import org.joda.time.Interval; +import org.joda.time.chrono.ISOChronology; import javax.annotation.Nullable; import java.io.File; @@ -138,7 +141,7 @@ public class SchemalessIndexTest continue; } - final long timestamp = new DateTime(event.get(TIMESTAMP)).getMillis(); + final long timestamp = new DateTime(event.get(TIMESTAMP), ISOChronology.getInstanceUTC()).getMillis(); if (theIndex == null) { theIndex = new IncrementalIndex.Builder() @@ -349,7 +352,7 @@ public class SchemalessIndexTest for (final Map event : events) { - final long timestamp = new DateTime(event.get(TIMESTAMP)).getMillis(); + final long timestamp = new DateTime(event.get(TIMESTAMP), ISOChronology.getInstanceUTC()).getMillis(); final List dims = Lists.newArrayList(); for (Map.Entry entry : event.entrySet()) { if (!entry.getKey().equalsIgnoreCase(TIMESTAMP) && !METRICS.contains(entry.getKey())) { @@ -397,7 +400,7 @@ public class SchemalessIndexTest final IncrementalIndex retVal = new IncrementalIndex.Builder() .setIndexSchema( new IncrementalIndexSchema.Builder() - .withMinTimestamp(new DateTime("2011-01-12T00:00:00.000Z").getMillis()) + .withMinTimestamp(DateTimes.of("2011-01-12T00:00:00.000Z").getMillis()) .withQueryGranularity(Granularities.MINUTE) .withMetrics(aggs) .build() @@ -419,7 +422,7 @@ public class SchemalessIndexTest retVal.add( new MapBasedInputRow( - new DateTime(event.get(TIMESTAMP)).getMillis(), + new DateTime(event.get(TIMESTAMP), ISOChronology.getInstanceUTC()).getMillis(), dims, event ) @@ -478,7 +481,7 @@ public class SchemalessIndexTest Iterables.concat( // TimelineObjectHolder is actually an iterable of iterable of indexable adapters Iterables.transform( - timeline.lookup(new Interval("1000-01-01/3000-01-01")), + timeline.lookup(Intervals.of("1000-01-01/3000-01-01")), new Function, Iterable>() { @Override diff --git a/processing/src/test/java/io/druid/segment/SchemalessTestFullTest.java b/processing/src/test/java/io/druid/segment/SchemalessTestFullTest.java index 9a8fe1ba56f..ea1391825d7 100644 --- a/processing/src/test/java/io/druid/segment/SchemalessTestFullTest.java +++ b/processing/src/test/java/io/druid/segment/SchemalessTestFullTest.java @@ -22,6 +22,8 @@ package io.druid.segment; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.Pair; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.Granularities; @@ -55,8 +57,6 @@ import io.druid.query.timeseries.TimeseriesResultValue; import io.druid.query.topn.TopNQuery; import io.druid.query.topn.TopNQueryBuilder; import io.druid.query.topn.TopNResultValue; -import org.joda.time.DateTime; -import org.joda.time.Interval; import org.junit.Test; import java.util.Arrays; @@ -94,7 +94,7 @@ public class SchemalessTestFullTest final List commonAggregators = Arrays.asList(rowsCount, indexDoubleSum, uniques); final QuerySegmentSpec fullOnInterval = new MultipleIntervalSegmentSpec( - Arrays.asList(new Interval("1970-01-01T00:00:00.000Z/2020-01-01T00:00:00.000Z")) + Arrays.asList(Intervals.of("1970-01-01T00:00:00.000Z/2020-01-01T00:00:00.000Z")) ); @Test @@ -102,7 +102,7 @@ public 
class SchemalessTestFullTest { List> expectedTimeSeriesResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 2L) @@ -118,7 +118,7 @@ public class SchemalessTestFullTest List> expectedFilteredTimeSeriesResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 1L) @@ -134,7 +134,7 @@ public class SchemalessTestFullTest List> expectedTopNResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -162,7 +162,7 @@ public class SchemalessTestFullTest List> expectedFilteredTopNResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -181,7 +181,7 @@ public class SchemalessTestFullTest List> expectedSearchResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new SearchResultValue( Arrays.asList( new SearchHit(placementishDimension, "a"), @@ -195,7 +195,7 @@ public class SchemalessTestFullTest List> expectedFilteredSearchResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new SearchResultValue( Arrays.asList( new SearchHit(placementishDimension, "a"), @@ -207,13 +207,13 @@ public class SchemalessTestFullTest List> expectedTimeBoundaryResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeBoundaryResultValue( ImmutableMap.of( TimeBoundaryQuery.MIN_TIME, - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), TimeBoundaryQuery.MAX_TIME, - new DateTime("2011-01-13T00:00:00.000Z") + DateTimes.of("2011-01-13T00:00:00.000Z") ) ) ) @@ -237,7 +237,7 @@ public class SchemalessTestFullTest { List> expectedTimeSeriesResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 2L) @@ -253,7 +253,7 @@ public class SchemalessTestFullTest List> expectedFilteredTimeSeriesResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 1L) @@ -269,7 +269,7 @@ public class SchemalessTestFullTest List> expectedTopNResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -297,7 +297,7 @@ public class SchemalessTestFullTest List> expectedFilteredTopNResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -325,7 +325,7 @@ public class SchemalessTestFullTest List> expectedSearchResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new SearchResultValue( Arrays.asList( ) @@ -335,7 +335,7 @@ public class SchemalessTestFullTest List> expectedFilteredSearchResults = 
Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new SearchResultValue( Arrays.asList( ) @@ -345,13 +345,13 @@ public class SchemalessTestFullTest List> expectedTimeBoundaryResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeBoundaryResultValue( ImmutableMap.of( TimeBoundaryQuery.MIN_TIME, - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), TimeBoundaryQuery.MAX_TIME, - new DateTime("2011-01-12T00:00:00.000Z") + DateTimes.of("2011-01-12T00:00:00.000Z") ) ) ) @@ -376,7 +376,7 @@ public class SchemalessTestFullTest { List> expectedTimeseriesResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 2L) @@ -392,7 +392,7 @@ public class SchemalessTestFullTest List> expectedFilteredTimeSeriesResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 1L) @@ -408,7 +408,7 @@ public class SchemalessTestFullTest List> expectedTopNResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -436,7 +436,7 @@ public class SchemalessTestFullTest List> expectedFilteredTopNResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -455,7 +455,7 @@ public class SchemalessTestFullTest List> expectedSearchResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new SearchResultValue( Arrays.asList( new SearchHit(placementishDimension, "a"), @@ -467,7 +467,7 @@ public class SchemalessTestFullTest List> expectedFilteredSearchResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new SearchResultValue( Arrays.asList( new SearchHit(qualityDimension, "automotive") @@ -478,13 +478,13 @@ public class SchemalessTestFullTest List> expectedTimeBoundaryResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeBoundaryResultValue( ImmutableMap.of( TimeBoundaryQuery.MIN_TIME, - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), TimeBoundaryQuery.MAX_TIME, - new DateTime("2011-01-12T00:00:00.000Z") + DateTimes.of("2011-01-12T00:00:00.000Z") ) ) ) @@ -508,7 +508,7 @@ public class SchemalessTestFullTest { List> expectedTimeseriesResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 2L) @@ -524,7 +524,7 @@ public class SchemalessTestFullTest List> expectedFilteredTimeSeriesResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 1L) @@ -540,7 +540,7 @@ public class SchemalessTestFullTest List> expectedTopNResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + 
DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -568,7 +568,7 @@ public class SchemalessTestFullTest List> expectedFilteredTopNResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -587,7 +587,7 @@ public class SchemalessTestFullTest List> expectedSearchResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new SearchResultValue( Arrays.asList( new SearchHit(qualityDimension, "automotive"), @@ -599,7 +599,7 @@ public class SchemalessTestFullTest List> expectedFilteredSearchResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new SearchResultValue( Arrays.asList( new SearchHit(qualityDimension, "automotive") @@ -610,13 +610,13 @@ public class SchemalessTestFullTest List> expectedTimeBoundaryResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeBoundaryResultValue( ImmutableMap.of( TimeBoundaryQuery.MIN_TIME, - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), TimeBoundaryQuery.MAX_TIME, - new DateTime("2011-01-13T00:00:00.000Z") + DateTimes.of("2011-01-13T00:00:00.000Z") ) ) ) @@ -640,7 +640,7 @@ public class SchemalessTestFullTest { List> expectedTimeseriesResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 2L) @@ -658,7 +658,7 @@ public class SchemalessTestFullTest List> expectedTopNResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -679,7 +679,7 @@ public class SchemalessTestFullTest List> expectedSearchResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new SearchResultValue( Arrays.asList( new SearchHit(placementishDimension, "a"), @@ -693,13 +693,13 @@ public class SchemalessTestFullTest List> expectedTimeBoundaryResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeBoundaryResultValue( ImmutableMap.of( TimeBoundaryQuery.MIN_TIME, - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), TimeBoundaryQuery.MAX_TIME, - new DateTime("2011-01-12T00:00:00.000Z") + DateTimes.of("2011-01-12T00:00:00.000Z") ) ) ) @@ -723,7 +723,7 @@ public class SchemalessTestFullTest { List> expectedTimeseriesResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 2L) @@ -739,7 +739,7 @@ public class SchemalessTestFullTest List> expectedFilteredTimeSeriesResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 1L) @@ -755,7 +755,7 @@ public class SchemalessTestFullTest List> expectedTopNResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( 
Arrays.>asList( ImmutableMap.builder() @@ -783,7 +783,7 @@ public class SchemalessTestFullTest List> expectedFilteredTopNResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -802,7 +802,7 @@ public class SchemalessTestFullTest List> expectedSearchResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new SearchResultValue( Arrays.asList( new SearchHit(placementishDimension, "a"), @@ -816,13 +816,13 @@ public class SchemalessTestFullTest List> expectedTimeBoundaryResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeBoundaryResultValue( ImmutableMap.of( TimeBoundaryQuery.MIN_TIME, - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), TimeBoundaryQuery.MAX_TIME, - new DateTime("2011-01-12T00:00:00.000Z") + DateTimes.of("2011-01-12T00:00:00.000Z") ) ) ) @@ -846,7 +846,7 @@ public class SchemalessTestFullTest { List> expectedTimeseriesResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 1L) @@ -862,7 +862,7 @@ public class SchemalessTestFullTest List> expectedFilteredTimeSeriesResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 0L) @@ -878,7 +878,7 @@ public class SchemalessTestFullTest List> expectedTopNResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Collections.singletonList( QueryRunnerTestHelper.orderedMap( @@ -896,7 +896,7 @@ public class SchemalessTestFullTest ); List> expectedFilteredTopNResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Collections.emptyList() ) @@ -905,7 +905,7 @@ public class SchemalessTestFullTest List> expectedSearchResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new SearchResultValue( Collections.emptyList() ) @@ -915,13 +915,13 @@ public class SchemalessTestFullTest List> expectedTimeBoundaryResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeBoundaryResultValue( ImmutableMap.of( TimeBoundaryQuery.MIN_TIME, - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), TimeBoundaryQuery.MAX_TIME, - new DateTime("2011-01-12T00:00:00.000Z") + DateTimes.of("2011-01-12T00:00:00.000Z") ) ) ) @@ -947,7 +947,7 @@ public class SchemalessTestFullTest { List> expectedTimeseriesResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 1L) @@ -965,7 +965,7 @@ public class SchemalessTestFullTest List> expectedTopNResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -986,7 +986,7 @@ public class SchemalessTestFullTest List> expectedSearchResults = 
Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new SearchResultValue( Arrays.asList( new SearchHit(placementishDimension, "a"), @@ -1000,13 +1000,13 @@ public class SchemalessTestFullTest List> expectedTimeBoundaryResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeBoundaryResultValue( ImmutableMap.of( TimeBoundaryQuery.MIN_TIME, - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), TimeBoundaryQuery.MAX_TIME, - new DateTime("2011-01-12T00:00:00.000Z") + DateTimes.of("2011-01-12T00:00:00.000Z") ) ) ) @@ -1032,7 +1032,7 @@ public class SchemalessTestFullTest { List> expectedTimeseriesResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 3L) @@ -1048,7 +1048,7 @@ public class SchemalessTestFullTest List> expectedFilteredTimeSeriesResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 1L) @@ -1064,7 +1064,7 @@ public class SchemalessTestFullTest List> expectedTopNResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( QueryRunnerTestHelper.orderedMap( @@ -1092,7 +1092,7 @@ public class SchemalessTestFullTest List> expectedFilteredTopNResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -1112,7 +1112,7 @@ public class SchemalessTestFullTest List> expectedSearchResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new SearchResultValue( Arrays.asList( new SearchHit(placementDimension, "mezzanine") @@ -1123,7 +1123,7 @@ public class SchemalessTestFullTest List> expectedFilteredSearchResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new SearchResultValue( Arrays.asList() ) @@ -1132,13 +1132,13 @@ public class SchemalessTestFullTest List> expectedTimeBoundaryResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeBoundaryResultValue( ImmutableMap.of( TimeBoundaryQuery.MIN_TIME, - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), TimeBoundaryQuery.MAX_TIME, - new DateTime("2011-01-12T00:00:00.000Z") + DateTimes.of("2011-01-12T00:00:00.000Z") ) ) ) @@ -1162,7 +1162,7 @@ public class SchemalessTestFullTest { List> expectedTimeseriesResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 11L) @@ -1178,7 +1178,7 @@ public class SchemalessTestFullTest List> expectedFilteredTimeSeriesResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 4L) @@ -1195,7 +1195,7 @@ public class SchemalessTestFullTest /* Uncomment when Druid support for nulls/empty strings is actually consistent List> 
expectedTopNResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -1232,7 +1232,7 @@ public class SchemalessTestFullTest */ List> expectedTopNResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -1269,7 +1269,7 @@ public class SchemalessTestFullTest List> expectedFilteredTopNResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -1297,7 +1297,7 @@ public class SchemalessTestFullTest List> expectedSearchResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new SearchResultValue( Arrays.asList( new SearchHit(placementishDimension, "a"), @@ -1311,7 +1311,7 @@ public class SchemalessTestFullTest List> expectedFilteredSearchResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new SearchResultValue( Arrays.asList( new SearchHit(placementishDimension, "a"), @@ -1323,13 +1323,13 @@ public class SchemalessTestFullTest List> expectedTimeBoundaryResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeBoundaryResultValue( ImmutableMap.of( TimeBoundaryQuery.MIN_TIME, - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), TimeBoundaryQuery.MAX_TIME, - new DateTime("2011-01-13T00:00:00.000Z") + DateTimes.of("2011-01-13T00:00:00.000Z") ) ) ) diff --git a/processing/src/test/java/io/druid/segment/SchemalessTestSimpleTest.java b/processing/src/test/java/io/druid/segment/SchemalessTestSimpleTest.java index 2a8489f0c47..69facab0d42 100644 --- a/processing/src/test/java/io/druid/segment/SchemalessTestSimpleTest.java +++ b/processing/src/test/java/io/druid/segment/SchemalessTestSimpleTest.java @@ -22,6 +22,8 @@ package io.druid.segment; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.granularity.Granularity; import io.druid.query.Druids; @@ -53,8 +55,6 @@ import io.druid.query.topn.TopNQuery; import io.druid.query.topn.TopNQueryBuilder; import io.druid.query.topn.TopNResultValue; import io.druid.segment.incremental.IncrementalIndex; -import org.joda.time.DateTime; -import org.joda.time.Interval; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; @@ -118,7 +118,7 @@ public class SchemalessTestSimpleTest final List commonAggregators = Arrays.asList(rowsCount, indexDoubleSum, uniques); final QuerySegmentSpec fullOnInterval = new MultipleIntervalSegmentSpec( - Arrays.asList(new Interval("1970-01-01T00:00:00.000Z/2020-01-01T00:00:00.000Z")) + Arrays.asList(Intervals.of("1970-01-01T00:00:00.000Z/2020-01-01T00:00:00.000Z")) ); private Segment segment; @@ -153,7 +153,7 @@ public class SchemalessTestSimpleTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new 
TimeseriesResultValue( ImmutableMap.builder() .put("rows", 11L) @@ -199,7 +199,7 @@ public class SchemalessTestSimpleTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.asList( new DimensionAndMetricValueExtractor( @@ -257,7 +257,7 @@ public class SchemalessTestSimpleTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new SearchResultValue( Arrays.asList( new SearchHit(placementishDimension, "a"), @@ -283,13 +283,13 @@ public class SchemalessTestSimpleTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeBoundaryResultValue( ImmutableMap.of( TimeBoundaryQuery.MIN_TIME, - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), TimeBoundaryQuery.MAX_TIME, - new DateTime("2011-01-13T00:00:00.000Z") + DateTimes.of("2011-01-13T00:00:00.000Z") ) ) ) diff --git a/processing/src/test/java/io/druid/segment/StringDimensionHandlerTest.java b/processing/src/test/java/io/druid/segment/StringDimensionHandlerTest.java index c98fe959ee9..6103abfd867 100644 --- a/processing/src/test/java/io/druid/segment/StringDimensionHandlerTest.java +++ b/processing/src/test/java/io/druid/segment/StringDimensionHandlerTest.java @@ -22,6 +22,7 @@ package io.druid.segment; import com.google.common.collect.ImmutableMap; import io.druid.data.input.MapBasedInputRow; import io.druid.data.input.impl.DimensionsSpec; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.Pair; import io.druid.query.aggregation.CountAggregatorFactory; import io.druid.segment.data.CompressedObjectStrategy; @@ -44,7 +45,7 @@ import java.util.Map; public class StringDimensionHandlerTest { - private static final Interval TEST_INTERVAL = Interval.parse("2015-01-01/2015-12-31"); + private static final Interval TEST_INTERVAL = Intervals.of("2015-01-01/2015-12-31"); private static final IndexSpec INDEX_SPEC = new IndexSpec( new ConciseBitmapSerdeFactory(), diff --git a/processing/src/test/java/io/druid/segment/TestHelper.java b/processing/src/test/java/io/druid/segment/TestHelper.java index c62b0259c9c..a0f5e62d5d2 100644 --- a/processing/src/test/java/io/druid/segment/TestHelper.java +++ b/processing/src/test/java/io/druid/segment/TestHelper.java @@ -165,7 +165,11 @@ public class TestHelper && (((Result) expectedNext).getValue()) instanceof TopNResultValue) { // Special to allow a floating point delta to be used in result comparison due to legacy expected results assertTopNResultValue(failMsg, (Result) expectedNext, (Result) next); - assertTopNResultValue(String.format("%s: Second iterator bad, multiple calls to iterator() should be safe", failMsg), (Result) expectedNext, (Result) next2); + assertTopNResultValue( + StringUtils.format("%s: Second iterator bad, multiple calls to iterator() should be safe", failMsg), + (Result) expectedNext, + (Result) next2 + ); } else { assertResult(failMsg, (Result) expectedNext, (Result) next); assertResult( @@ -290,7 +294,7 @@ public class TestHelper Assert.assertEquals("Size of list must match", listExpectedRows.size(), listActualRows.size()); IntStream.range(0, listExpectedRows.size()).forEach(value -> assertRow( - String.format("%s, on value number [%s]", msg, value), + StringUtils.format("%s, on value number [%s]", msg, value), listExpectedRows.get(value), 
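// The TestHelper changes here also swap bare `String.format(...)` for
// `StringUtils.format(...)`. A hedged sketch of the assumed wrapper: it pins an explicit
// locale so message formatting does not vary with the JVM's default locale; the real
// io.druid.java.util.common.StringUtils may differ in detail:

import java.util.Locale;

public final class StringUtils
{
  private StringUtils() {}

  // Same formatting semantics as String.format, but independent of the default locale.
  public static String format(String message, Object... formatArgs)
  {
    return String.format(Locale.ENGLISH, message, formatArgs);
  }
}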
listActualRows.get(value) )); diff --git a/processing/src/test/java/io/druid/segment/TestIndex.java b/processing/src/test/java/io/druid/segment/TestIndex.java index c59c89f8f5c..4271eaa213b 100644 --- a/processing/src/test/java/io/druid/segment/TestIndex.java +++ b/processing/src/test/java/io/druid/segment/TestIndex.java @@ -34,6 +34,8 @@ import io.druid.data.input.impl.StringDimensionSchema; import io.druid.data.input.impl.StringInputRowParser; import io.druid.data.input.impl.TimestampSpec; import io.druid.hll.HyperLogLogHash; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.logger.Logger; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.DoubleMaxAggregatorFactory; @@ -50,7 +52,6 @@ import io.druid.segment.incremental.IncrementalIndex; import io.druid.segment.incremental.IncrementalIndexSchema; import io.druid.segment.serde.ComplexMetrics; import io.druid.segment.virtual.ExpressionVirtualColumn; -import org.joda.time.DateTime; import org.joda.time.Interval; import java.io.File; @@ -117,7 +118,7 @@ public class TestIndex public static final String[] DOUBLE_METRICS = new String[]{"index", "indexMin", "indexMaxPlusTen"}; public static final String[] FLOAT_METRICS = new String[]{"indexFloat", "indexMinFloat", "indexMaxFloat"}; private static final Logger log = new Logger(TestIndex.class); - private static final Interval DATA_INTERVAL = new Interval("2011-01-12T00:00:00.000Z/2011-05-01T00:00:00.000Z"); + private static final Interval DATA_INTERVAL = Intervals.of("2011-01-12T00:00:00.000Z/2011-05-01T00:00:00.000Z"); private static final VirtualColumns VIRTUAL_COLUMNS = VirtualColumns.create( Collections.singletonList( new ExpressionVirtualColumn("expr", "index + 10", ValueType.FLOAT, TestExprMacroTable.INSTANCE) @@ -269,7 +270,7 @@ public class TestIndex public static IncrementalIndex makeRealtimeIndex(final CharSource source, boolean rollup) { final IncrementalIndexSchema schema = new IncrementalIndexSchema.Builder() - .withMinTimestamp(new DateTime("2011-01-12T00:00:00.000Z").getMillis()) + .withMinTimestamp(DateTimes.of("2011-01-12T00:00:00.000Z").getMillis()) .withTimestampSpec(new TimestampSpec("ds", "auto", null)) .withDimensionsSpec(DIMENSIONS_SPEC) .withVirtualColumns(VIRTUAL_COLUMNS) diff --git a/processing/src/test/java/io/druid/segment/data/IncrementalIndexTest.java b/processing/src/test/java/io/druid/segment/data/IncrementalIndexTest.java index e6849544a7a..bbcc3697ca3 100644 --- a/processing/src/test/java/io/druid/segment/data/IncrementalIndexTest.java +++ b/processing/src/test/java/io/druid/segment/data/IncrementalIndexTest.java @@ -34,6 +34,7 @@ import io.druid.collections.StupidPool; import io.druid.data.input.MapBasedInputRow; import io.druid.data.input.Row; import io.druid.data.input.impl.DimensionsSpec; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Accumulator; @@ -468,7 +469,7 @@ public class IncrementalIndexTest TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() .dataSource("xxx") .granularity(Granularities.ALL) - .intervals(ImmutableList.of(new Interval("2000/2030"))) + .intervals(ImmutableList.of(Intervals.of("2000/2030"))) .aggregators(queryAggregatorFactories) .build(); @@ -569,7 +570,7 @@ public class IncrementalIndexTest ) ); final long timestamp = System.currentTimeMillis(); - final Interval queryInterval = 
new Interval("1900-01-01T00:00:00Z/2900-01-01T00:00:00Z"); + final Interval queryInterval = Intervals.of("1900-01-01T00:00:00Z/2900-01-01T00:00:00Z"); final List> indexFutures = Lists.newArrayListWithExpectedSize(concurrentThreads); final List> queryFutures = Lists.newArrayListWithExpectedSize(concurrentThreads); final Segment incrementalIndexSegment = new IncrementalIndexSegment(index, null); diff --git a/processing/src/test/java/io/druid/segment/filter/AndFilterTest.java b/processing/src/test/java/io/druid/segment/filter/AndFilterTest.java index ed7a773de81..2d8c2440249 100644 --- a/processing/src/test/java/io/druid/segment/filter/AndFilterTest.java +++ b/processing/src/test/java/io/druid/segment/filter/AndFilterTest.java @@ -28,6 +28,7 @@ import io.druid.data.input.impl.InputRowParser; import io.druid.data.input.impl.MapInputRowParser; import io.druid.data.input.impl.TimeAndDimsParseSpec; import io.druid.data.input.impl.TimestampSpec; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.Pair; import io.druid.query.filter.AndDimFilter; import io.druid.query.filter.DimFilter; @@ -35,7 +36,6 @@ import io.druid.query.filter.NotDimFilter; import io.druid.query.filter.SelectorDimFilter; import io.druid.segment.IndexBuilder; import io.druid.segment.StorageAdapter; -import org.joda.time.DateTime; import org.junit.AfterClass; import org.junit.Test; import org.junit.runner.RunWith; @@ -52,7 +52,7 @@ public class AndFilterTest extends BaseFilterTest private static final InputRowParser> PARSER = new MapInputRowParser( new TimeAndDimsParseSpec( - new TimestampSpec(TIMESTAMP_COLUMN, "iso", new DateTime("2000")), + new TimestampSpec(TIMESTAMP_COLUMN, "iso", DateTimes.of("2000")), new DimensionsSpec(null, null, null) ) ); diff --git a/processing/src/test/java/io/druid/segment/filter/BaseFilterTest.java b/processing/src/test/java/io/druid/segment/filter/BaseFilterTest.java index 7411c44e388..61f4dd76447 100644 --- a/processing/src/test/java/io/druid/segment/filter/BaseFilterTest.java +++ b/processing/src/test/java/io/druid/segment/filter/BaseFilterTest.java @@ -27,7 +27,7 @@ import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import io.druid.common.guava.SettableSupplier; -import io.druid.common.utils.JodaUtils; +import io.druid.java.util.common.Intervals; import io.druid.data.input.InputRow; import io.druid.java.util.common.Pair; import io.druid.java.util.common.StringUtils; @@ -66,7 +66,6 @@ import io.druid.segment.data.RoaringBitmapSerdeFactory; import io.druid.segment.incremental.IncrementalIndex; import io.druid.segment.incremental.IncrementalIndexStorageAdapter; import io.druid.segment.virtual.ExpressionVirtualColumn; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Before; import org.junit.Rule; @@ -302,7 +301,7 @@ public abstract class BaseFilterTest { return adapter.makeCursors( filter, - new Interval(JodaUtils.MIN_INSTANT, JodaUtils.MAX_INSTANT), + Intervals.ETERNITY, VIRTUAL_COLUMNS, Granularities.ALL, false, diff --git a/processing/src/test/java/io/druid/segment/filter/BoundFilterTest.java b/processing/src/test/java/io/druid/segment/filter/BoundFilterTest.java index 0e11e4ed531..f94e8769ba4 100644 --- a/processing/src/test/java/io/druid/segment/filter/BoundFilterTest.java +++ b/processing/src/test/java/io/druid/segment/filter/BoundFilterTest.java @@ -28,6 +28,7 @@ import io.druid.data.input.impl.InputRowParser; import io.druid.data.input.impl.MapInputRowParser; import 
io.druid.data.input.impl.TimeAndDimsParseSpec; import io.druid.data.input.impl.TimestampSpec; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.Pair; import io.druid.js.JavaScriptConfig; import io.druid.query.extraction.ExtractionFn; @@ -36,7 +37,6 @@ import io.druid.query.filter.BoundDimFilter; import io.druid.query.ordering.StringComparators; import io.druid.segment.IndexBuilder; import io.druid.segment.StorageAdapter; -import org.joda.time.DateTime; import org.junit.AfterClass; import org.junit.Test; import org.junit.runner.RunWith; @@ -53,7 +53,7 @@ public class BoundFilterTest extends BaseFilterTest private static final InputRowParser> PARSER = new MapInputRowParser( new TimeAndDimsParseSpec( - new TimestampSpec(TIMESTAMP_COLUMN, "iso", new DateTime("2000")), + new TimestampSpec(TIMESTAMP_COLUMN, "iso", DateTimes.of("2000")), new DimensionsSpec(null, null, null) ) ); diff --git a/processing/src/test/java/io/druid/segment/filter/ColumnComparisonFilterTest.java b/processing/src/test/java/io/druid/segment/filter/ColumnComparisonFilterTest.java index 9b14dac1fab..8aa0e588255 100644 --- a/processing/src/test/java/io/druid/segment/filter/ColumnComparisonFilterTest.java +++ b/processing/src/test/java/io/druid/segment/filter/ColumnComparisonFilterTest.java @@ -28,6 +28,7 @@ import io.druid.data.input.impl.InputRowParser; import io.druid.data.input.impl.MapInputRowParser; import io.druid.data.input.impl.TimeAndDimsParseSpec; import io.druid.data.input.impl.TimestampSpec; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.Pair; import io.druid.query.dimension.DefaultDimensionSpec; import io.druid.query.dimension.DimensionSpec; @@ -38,7 +39,6 @@ import io.druid.query.lookup.LookupExtractionFn; import io.druid.query.lookup.LookupExtractor; import io.druid.segment.IndexBuilder; import io.druid.segment.StorageAdapter; -import org.joda.time.DateTime; import org.junit.AfterClass; import org.junit.Test; import org.junit.runner.RunWith; @@ -55,7 +55,7 @@ public class ColumnComparisonFilterTest extends BaseFilterTest private static final InputRowParser> PARSER = new MapInputRowParser( new TimeAndDimsParseSpec( - new TimestampSpec(TIMESTAMP_COLUMN, "iso", new DateTime("2000")), + new TimestampSpec(TIMESTAMP_COLUMN, "iso", DateTimes.of("2000")), new DimensionsSpec( DimensionsSpec.getDefaultSchemas(ImmutableList.of("dim0", "dim1", "dim2")), null, diff --git a/processing/src/test/java/io/druid/segment/filter/ExpressionFilterTest.java b/processing/src/test/java/io/druid/segment/filter/ExpressionFilterTest.java index 2ac8e3bbbaa..8e3837e5c7e 100644 --- a/processing/src/test/java/io/druid/segment/filter/ExpressionFilterTest.java +++ b/processing/src/test/java/io/druid/segment/filter/ExpressionFilterTest.java @@ -31,13 +31,13 @@ import io.druid.data.input.impl.MapInputRowParser; import io.druid.data.input.impl.StringDimensionSchema; import io.druid.data.input.impl.TimeAndDimsParseSpec; import io.druid.data.input.impl.TimestampSpec; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.Pair; import io.druid.query.expression.TestExprMacroTable; import io.druid.query.filter.ExpressionDimFilter; import io.druid.segment.IndexBuilder; import io.druid.segment.StorageAdapter; import io.druid.segment.incremental.IncrementalIndexSchema; -import org.joda.time.DateTime; import org.junit.AfterClass; import org.junit.Test; import org.junit.runner.RunWith; @@ -55,7 +55,7 @@ public class ExpressionFilterTest extends BaseFilterTest private static 
final InputRowParser> PARSER = new MapInputRowParser( new TimeAndDimsParseSpec( - new TimestampSpec(TIMESTAMP_COLUMN, "iso", new DateTime("2000")), + new TimestampSpec(TIMESTAMP_COLUMN, "iso", DateTimes.of("2000")), new DimensionsSpec( ImmutableList.of( new StringDimensionSchema("dim0"), diff --git a/processing/src/test/java/io/druid/segment/filter/FilterPartitionTest.java b/processing/src/test/java/io/druid/segment/filter/FilterPartitionTest.java index 5c5b96e39fc..309a9b90845 100644 --- a/processing/src/test/java/io/druid/segment/filter/FilterPartitionTest.java +++ b/processing/src/test/java/io/druid/segment/filter/FilterPartitionTest.java @@ -30,6 +30,7 @@ import io.druid.data.input.impl.InputRowParser; import io.druid.data.input.impl.MapInputRowParser; import io.druid.data.input.impl.TimeAndDimsParseSpec; import io.druid.data.input.impl.TimestampSpec; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.Pair; import io.druid.js.JavaScriptConfig; import io.druid.query.extraction.ExtractionFn; @@ -46,7 +47,6 @@ import io.druid.query.filter.OrDimFilter; import io.druid.query.filter.SelectorDimFilter; import io.druid.segment.IndexBuilder; import io.druid.segment.StorageAdapter; -import org.joda.time.DateTime; import org.junit.AfterClass; import org.junit.Assert; import org.junit.Test; @@ -157,7 +157,7 @@ public class FilterPartitionTest extends BaseFilterTest private static final InputRowParser> PARSER = new MapInputRowParser( new TimeAndDimsParseSpec( - new TimestampSpec(TIMESTAMP_COLUMN, "iso", new DateTime("2000")), + new TimestampSpec(TIMESTAMP_COLUMN, "iso", DateTimes.of("2000")), new DimensionsSpec( DimensionsSpec.getDefaultSchemas(ImmutableList.of("dim0", "dim1", "dim2", "dim3")), null, diff --git a/processing/src/test/java/io/druid/segment/filter/FloatAndDoubleFilteringTest.java b/processing/src/test/java/io/druid/segment/filter/FloatAndDoubleFilteringTest.java index 4ae510a720b..b737be71623 100644 --- a/processing/src/test/java/io/druid/segment/filter/FloatAndDoubleFilteringTest.java +++ b/processing/src/test/java/io/druid/segment/filter/FloatAndDoubleFilteringTest.java @@ -35,6 +35,7 @@ import io.druid.data.input.impl.MapInputRowParser; import io.druid.data.input.impl.StringDimensionSchema; import io.druid.data.input.impl.TimeAndDimsParseSpec; import io.druid.data.input.impl.TimestampSpec; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.Pair; import io.druid.js.JavaScriptConfig; import io.druid.query.extraction.MapLookupExtractor; @@ -52,7 +53,6 @@ import io.druid.query.search.search.ContainsSearchQuerySpec; import io.druid.segment.IndexBuilder; import io.druid.segment.StorageAdapter; import io.druid.segment.incremental.IncrementalIndexSchema; -import org.joda.time.DateTime; import org.junit.AfterClass; import org.junit.Assert; import org.junit.Test; @@ -79,7 +79,7 @@ public class FloatAndDoubleFilteringTest extends BaseFilterTest private static final InputRowParser> PARSER = new MapInputRowParser( new TimeAndDimsParseSpec( - new TimestampSpec(TIMESTAMP_COLUMN, "millis", new DateTime("2000")), + new TimestampSpec(TIMESTAMP_COLUMN, "millis", DateTimes.of("2000")), new DimensionsSpec( ImmutableList.of( new StringDimensionSchema("dim0"), diff --git a/processing/src/test/java/io/druid/segment/filter/InFilterTest.java b/processing/src/test/java/io/druid/segment/filter/InFilterTest.java index 54a136569de..74d5c58adfb 100644 --- a/processing/src/test/java/io/druid/segment/filter/InFilterTest.java +++ 
b/processing/src/test/java/io/druid/segment/filter/InFilterTest.java @@ -29,6 +29,7 @@ import io.druid.data.input.impl.InputRowParser; import io.druid.data.input.impl.MapInputRowParser; import io.druid.data.input.impl.TimeAndDimsParseSpec; import io.druid.data.input.impl.TimestampSpec; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.Pair; import io.druid.js.JavaScriptConfig; import io.druid.query.extraction.ExtractionFn; @@ -40,7 +41,6 @@ import io.druid.query.lookup.LookupExtractionFn; import io.druid.query.lookup.LookupExtractor; import io.druid.segment.IndexBuilder; import io.druid.segment.StorageAdapter; -import org.joda.time.DateTime; import org.junit.AfterClass; import org.junit.Test; import org.junit.runner.RunWith; @@ -57,7 +57,7 @@ public class InFilterTest extends BaseFilterTest private static final InputRowParser> PARSER = new MapInputRowParser( new TimeAndDimsParseSpec( - new TimestampSpec(TIMESTAMP_COLUMN, "iso", new DateTime("2000")), + new TimestampSpec(TIMESTAMP_COLUMN, "iso", DateTimes.of("2000")), new DimensionsSpec(null, null, null) ) ); diff --git a/processing/src/test/java/io/druid/segment/filter/InvalidFilteringTest.java b/processing/src/test/java/io/druid/segment/filter/InvalidFilteringTest.java index 5c8f52bee0a..8f28cb99e6f 100644 --- a/processing/src/test/java/io/druid/segment/filter/InvalidFilteringTest.java +++ b/processing/src/test/java/io/druid/segment/filter/InvalidFilteringTest.java @@ -28,6 +28,7 @@ import io.druid.data.input.impl.InputRowParser; import io.druid.data.input.impl.MapInputRowParser; import io.druid.data.input.impl.TimeAndDimsParseSpec; import io.druid.data.input.impl.TimestampSpec; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.Pair; import io.druid.query.aggregation.CountAggregatorFactory; import io.druid.query.aggregation.DoubleMaxAggregatorFactory; @@ -37,7 +38,6 @@ import io.druid.query.filter.SelectorDimFilter; import io.druid.segment.IndexBuilder; import io.druid.segment.StorageAdapter; import io.druid.segment.incremental.IncrementalIndexSchema; -import org.joda.time.DateTime; import org.junit.AfterClass; import org.junit.Test; import org.junit.runner.RunWith; @@ -56,7 +56,7 @@ public class InvalidFilteringTest extends BaseFilterTest private static final InputRowParser> PARSER = new MapInputRowParser( new TimeAndDimsParseSpec( - new TimestampSpec(TIMESTAMP_COLUMN, "millis", new DateTime("2000")), + new TimestampSpec(TIMESTAMP_COLUMN, "millis", DateTimes.of("2000")), new DimensionsSpec( DimensionsSpec.getDefaultSchemas(ImmutableList.of("dim0", "dim1", "dim2", "dim3")), null, diff --git a/processing/src/test/java/io/druid/segment/filter/JavaScriptFilterTest.java b/processing/src/test/java/io/druid/segment/filter/JavaScriptFilterTest.java index c923545e186..fd140811a42 100644 --- a/processing/src/test/java/io/druid/segment/filter/JavaScriptFilterTest.java +++ b/processing/src/test/java/io/druid/segment/filter/JavaScriptFilterTest.java @@ -28,6 +28,7 @@ import io.druid.data.input.impl.InputRowParser; import io.druid.data.input.impl.MapInputRowParser; import io.druid.data.input.impl.TimeAndDimsParseSpec; import io.druid.data.input.impl.TimestampSpec; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.Pair; import io.druid.js.JavaScriptConfig; import io.druid.query.extraction.ExtractionFn; @@ -37,7 +38,6 @@ import io.druid.query.lookup.LookupExtractionFn; import io.druid.query.lookup.LookupExtractor; import io.druid.segment.IndexBuilder; import 
io.druid.segment.StorageAdapter; -import org.joda.time.DateTime; import org.junit.AfterClass; import org.junit.Test; import org.junit.runner.RunWith; @@ -54,7 +54,7 @@ public class JavaScriptFilterTest extends BaseFilterTest private static final InputRowParser> PARSER = new MapInputRowParser( new TimeAndDimsParseSpec( - new TimestampSpec(TIMESTAMP_COLUMN, "iso", new DateTime("2000")), + new TimestampSpec(TIMESTAMP_COLUMN, "iso", DateTimes.of("2000")), new DimensionsSpec( DimensionsSpec.getDefaultSchemas(ImmutableList.of("dim0", "dim1", "dim2", "dim3")), null, diff --git a/processing/src/test/java/io/druid/segment/filter/LikeFilterTest.java b/processing/src/test/java/io/druid/segment/filter/LikeFilterTest.java index 07dd395dd89..728bdc9e738 100644 --- a/processing/src/test/java/io/druid/segment/filter/LikeFilterTest.java +++ b/processing/src/test/java/io/druid/segment/filter/LikeFilterTest.java @@ -28,12 +28,12 @@ import io.druid.data.input.impl.InputRowParser; import io.druid.data.input.impl.MapInputRowParser; import io.druid.data.input.impl.TimeAndDimsParseSpec; import io.druid.data.input.impl.TimestampSpec; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.Pair; import io.druid.query.extraction.SubstringDimExtractionFn; import io.druid.query.filter.LikeDimFilter; import io.druid.segment.IndexBuilder; import io.druid.segment.StorageAdapter; -import org.joda.time.DateTime; import org.junit.AfterClass; import org.junit.Test; import org.junit.runner.RunWith; @@ -50,7 +50,7 @@ public class LikeFilterTest extends BaseFilterTest private static final InputRowParser> PARSER = new MapInputRowParser( new TimeAndDimsParseSpec( - new TimestampSpec(TIMESTAMP_COLUMN, "iso", new DateTime("2000")), + new TimestampSpec(TIMESTAMP_COLUMN, "iso", DateTimes.of("2000")), new DimensionsSpec(null, null, null) ) ); diff --git a/processing/src/test/java/io/druid/segment/filter/LongFilteringTest.java b/processing/src/test/java/io/druid/segment/filter/LongFilteringTest.java index b20c7f7575e..8e621bb10f0 100644 --- a/processing/src/test/java/io/druid/segment/filter/LongFilteringTest.java +++ b/processing/src/test/java/io/druid/segment/filter/LongFilteringTest.java @@ -32,6 +32,7 @@ import io.druid.data.input.impl.InputRowParser; import io.druid.data.input.impl.MapInputRowParser; import io.druid.data.input.impl.TimeAndDimsParseSpec; import io.druid.data.input.impl.TimestampSpec; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.Pair; import io.druid.js.JavaScriptConfig; import io.druid.query.aggregation.LongSumAggregatorFactory; @@ -50,7 +51,6 @@ import io.druid.query.search.search.ContainsSearchQuerySpec; import io.druid.segment.IndexBuilder; import io.druid.segment.StorageAdapter; import io.druid.segment.incremental.IncrementalIndexSchema; -import org.joda.time.DateTime; import org.junit.AfterClass; import org.junit.Assert; import org.junit.Test; @@ -76,7 +76,7 @@ public class LongFilteringTest extends BaseFilterTest private static final InputRowParser> PARSER = new MapInputRowParser( new TimeAndDimsParseSpec( - new TimestampSpec(TIMESTAMP_COLUMN, "millis", new DateTime("2000")), + new TimestampSpec(TIMESTAMP_COLUMN, "millis", DateTimes.of("2000")), new DimensionsSpec( DimensionsSpec.getDefaultSchemas(ImmutableList.of("dim0", "dim1", "dim2", "dim3")), null, diff --git a/processing/src/test/java/io/druid/segment/filter/NotFilterTest.java b/processing/src/test/java/io/druid/segment/filter/NotFilterTest.java index d581d999bf4..6fdb7191fc8 100644 --- 
a/processing/src/test/java/io/druid/segment/filter/NotFilterTest.java +++ b/processing/src/test/java/io/druid/segment/filter/NotFilterTest.java @@ -28,12 +28,12 @@ import io.druid.data.input.impl.InputRowParser; import io.druid.data.input.impl.MapInputRowParser; import io.druid.data.input.impl.TimeAndDimsParseSpec; import io.druid.data.input.impl.TimestampSpec; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.Pair; import io.druid.query.filter.NotDimFilter; import io.druid.query.filter.SelectorDimFilter; import io.druid.segment.IndexBuilder; import io.druid.segment.StorageAdapter; -import org.joda.time.DateTime; import org.junit.AfterClass; import org.junit.Test; import org.junit.runner.RunWith; @@ -50,7 +50,7 @@ public class NotFilterTest extends BaseFilterTest private static final InputRowParser> PARSER = new MapInputRowParser( new TimeAndDimsParseSpec( - new TimestampSpec(TIMESTAMP_COLUMN, "iso", new DateTime("2000")), + new TimestampSpec(TIMESTAMP_COLUMN, "iso", DateTimes.of("2000")), new DimensionsSpec(null, null, null) ) ); diff --git a/processing/src/test/java/io/druid/segment/filter/RegexFilterTest.java b/processing/src/test/java/io/druid/segment/filter/RegexFilterTest.java index 5b177928c86..cb62975b846 100644 --- a/processing/src/test/java/io/druid/segment/filter/RegexFilterTest.java +++ b/processing/src/test/java/io/druid/segment/filter/RegexFilterTest.java @@ -28,6 +28,7 @@ import io.druid.data.input.impl.InputRowParser; import io.druid.data.input.impl.MapInputRowParser; import io.druid.data.input.impl.TimeAndDimsParseSpec; import io.druid.data.input.impl.TimestampSpec; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.Pair; import io.druid.js.JavaScriptConfig; import io.druid.query.extraction.ExtractionFn; @@ -35,7 +36,6 @@ import io.druid.query.extraction.JavaScriptExtractionFn; import io.druid.query.filter.RegexDimFilter; import io.druid.segment.IndexBuilder; import io.druid.segment.StorageAdapter; -import org.joda.time.DateTime; import org.junit.AfterClass; import org.junit.Test; import org.junit.runner.RunWith; @@ -52,7 +52,7 @@ public class RegexFilterTest extends BaseFilterTest private static final InputRowParser> PARSER = new MapInputRowParser( new TimeAndDimsParseSpec( - new TimestampSpec(TIMESTAMP_COLUMN, "iso", new DateTime("2000")), + new TimestampSpec(TIMESTAMP_COLUMN, "iso", DateTimes.of("2000")), new DimensionsSpec( DimensionsSpec.getDefaultSchemas(ImmutableList.of("dim0", "dim1", "dim2", "dim3")), null, diff --git a/processing/src/test/java/io/druid/segment/filter/SearchQueryFilterTest.java b/processing/src/test/java/io/druid/segment/filter/SearchQueryFilterTest.java index d3c24c5f6aa..49fd96b2b68 100644 --- a/processing/src/test/java/io/druid/segment/filter/SearchQueryFilterTest.java +++ b/processing/src/test/java/io/druid/segment/filter/SearchQueryFilterTest.java @@ -28,6 +28,7 @@ import io.druid.data.input.impl.InputRowParser; import io.druid.data.input.impl.MapInputRowParser; import io.druid.data.input.impl.TimeAndDimsParseSpec; import io.druid.data.input.impl.TimestampSpec; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.Pair; import io.druid.js.JavaScriptConfig; import io.druid.query.extraction.ExtractionFn; @@ -37,7 +38,6 @@ import io.druid.query.search.search.ContainsSearchQuerySpec; import io.druid.query.search.search.SearchQuerySpec; import io.druid.segment.IndexBuilder; import io.druid.segment.StorageAdapter; -import org.joda.time.DateTime; import 
org.junit.AfterClass; import org.junit.Test; import org.junit.runner.RunWith; @@ -54,7 +54,7 @@ public class SearchQueryFilterTest extends BaseFilterTest private static final InputRowParser> PARSER = new MapInputRowParser( new TimeAndDimsParseSpec( - new TimestampSpec(TIMESTAMP_COLUMN, "iso", new DateTime("2000")), + new TimestampSpec(TIMESTAMP_COLUMN, "iso", DateTimes.of("2000")), new DimensionsSpec( DimensionsSpec.getDefaultSchemas(ImmutableList.of("dim0", "dim1", "dim2", "dim3")), null, diff --git a/processing/src/test/java/io/druid/segment/filter/SelectorFilterTest.java b/processing/src/test/java/io/druid/segment/filter/SelectorFilterTest.java index e3bd81b4727..2f41178aee5 100644 --- a/processing/src/test/java/io/druid/segment/filter/SelectorFilterTest.java +++ b/processing/src/test/java/io/druid/segment/filter/SelectorFilterTest.java @@ -28,6 +28,7 @@ import io.druid.data.input.impl.InputRowParser; import io.druid.data.input.impl.MapInputRowParser; import io.druid.data.input.impl.TimeAndDimsParseSpec; import io.druid.data.input.impl.TimestampSpec; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.Pair; import io.druid.query.extraction.MapLookupExtractor; import io.druid.query.extraction.TimeDimExtractionFn; @@ -38,7 +39,6 @@ import io.druid.query.lookup.LookupExtractionFn; import io.druid.query.lookup.LookupExtractor; import io.druid.segment.IndexBuilder; import io.druid.segment.StorageAdapter; -import org.joda.time.DateTime; import org.junit.AfterClass; import org.junit.Assert; import org.junit.Test; @@ -57,7 +57,7 @@ public class SelectorFilterTest extends BaseFilterTest private static final InputRowParser> PARSER = new MapInputRowParser( new TimeAndDimsParseSpec( - new TimestampSpec(TIMESTAMP_COLUMN, "iso", new DateTime("2000")), + new TimestampSpec(TIMESTAMP_COLUMN, "iso", DateTimes.of("2000")), new DimensionsSpec( DimensionsSpec.getDefaultSchemas(ImmutableList.of("dim0", "dim1", "dim2", "dim3", "dim6")), null, diff --git a/processing/src/test/java/io/druid/segment/filter/SpatialFilterBonusTest.java b/processing/src/test/java/io/druid/segment/filter/SpatialFilterBonusTest.java index 2279836aba7..f2def054670 100644 --- a/processing/src/test/java/io/druid/segment/filter/SpatialFilterBonusTest.java +++ b/processing/src/test/java/io/druid/segment/filter/SpatialFilterBonusTest.java @@ -28,6 +28,8 @@ import io.druid.collections.spatial.search.RectangularBound; import io.druid.data.input.MapBasedInputRow; import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.SpatialDimensionSchema; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.Granularities; import io.druid.query.Druids; @@ -56,7 +58,6 @@ import io.druid.segment.Segment; import io.druid.segment.TestHelper; import io.druid.segment.incremental.IncrementalIndex; import io.druid.segment.incremental.IncrementalIndexSchema; -import org.joda.time.DateTime; import org.joda.time.Interval; import org.junit.Test; import org.junit.runner.RunWith; @@ -77,7 +78,7 @@ import java.util.Set; public class SpatialFilterBonusTest { public static final int NUM_POINTS = 5000; - private static Interval DATA_INTERVAL = new Interval("2013-01-01/2013-01-07"); + private static Interval DATA_INTERVAL = Intervals.of("2013-01-01/2013-01-07"); private static AggregatorFactory[] METRIC_AGGS = new AggregatorFactory[]{ new CountAggregatorFactory("rows"), new 
LongSumAggregatorFactory("val", "val") @@ -142,10 +143,10 @@ public class SpatialFilterBonusTest theIndex.add( new MapBasedInputRow( - new DateTime("2013-01-01").getMillis(), + DateTimes.of("2013-01-01").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-01").toString(), + "timestamp", DateTimes.of("2013-01-01").toString(), "dim", "foo", "dim.geo", "0.0,0.0", "val", 17L @@ -154,10 +155,10 @@ public class SpatialFilterBonusTest ); theIndex.add( new MapBasedInputRow( - new DateTime("2013-01-02").getMillis(), + DateTimes.of("2013-01-02").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-02").toString(), + "timestamp", DateTimes.of("2013-01-02").toString(), "dim", "foo", "dim.geo", "1.0,3.0", "val", 29L @@ -166,10 +167,10 @@ public class SpatialFilterBonusTest ); theIndex.add( new MapBasedInputRow( - new DateTime("2013-01-03").getMillis(), + DateTimes.of("2013-01-03").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-03").toString(), + "timestamp", DateTimes.of("2013-01-03").toString(), "dim", "foo", "dim.geo", "4.0,2.0", "val", 13L @@ -178,10 +179,10 @@ public class SpatialFilterBonusTest ); theIndex.add( new MapBasedInputRow( - new DateTime("2013-01-04").getMillis(), + DateTimes.of("2013-01-04").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-04").toString(), + "timestamp", DateTimes.of("2013-01-04").toString(), "dim", "foo", "dim.geo", "7.0,3.0", "val", 91L @@ -190,10 +191,10 @@ public class SpatialFilterBonusTest ); theIndex.add( new MapBasedInputRow( - new DateTime("2013-01-05").getMillis(), + DateTimes.of("2013-01-05").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-05").toString(), + "timestamp", DateTimes.of("2013-01-05").toString(), "dim", "foo", "dim.geo", "8.0,6.0", "val", 47L @@ -202,10 +203,10 @@ public class SpatialFilterBonusTest ); theIndex.add( new MapBasedInputRow( - new DateTime("2013-01-05").getMillis(), + DateTimes.of("2013-01-05").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-05").toString(), + "timestamp", DateTimes.of("2013-01-05").toString(), "dim", "foo", "dim.geo", "_mmx.unknown", "val", 501L @@ -230,10 +231,10 @@ public class SpatialFilterBonusTest } theIndex.add( new MapBasedInputRow( - new DateTime("2013-01-01").getMillis(), + DateTimes.of("2013-01-01").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-01").toString(), + "timestamp", DateTimes.of("2013-01-01").toString(), "dim", "boo", "dim.geo", coord, "val", i @@ -333,10 +334,10 @@ public class SpatialFilterBonusTest first.add( new MapBasedInputRow( - new DateTime("2013-01-01").getMillis(), + DateTimes.of("2013-01-01").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-01").toString(), + "timestamp", DateTimes.of("2013-01-01").toString(), "dim", "foo", "dim.geo", "0.0,0.0", "val", 17L @@ -345,10 +346,10 @@ public class SpatialFilterBonusTest ); first.add( new MapBasedInputRow( - new DateTime("2013-01-02").getMillis(), + DateTimes.of("2013-01-02").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-02").toString(), + "timestamp", DateTimes.of("2013-01-02").toString(), "dim", "foo", "dim.geo", "1.0,3.0", "val", 29L @@ -357,10 +358,10 @@ public class SpatialFilterBonusTest ); first.add( new MapBasedInputRow( - new DateTime("2013-01-03").getMillis(), + DateTimes.of("2013-01-03").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-03").toString(), + "timestamp", 
DateTimes.of("2013-01-03").toString(), "dim", "foo", "dim.geo", "4.0,2.0", "val", 13L @@ -369,10 +370,10 @@ public class SpatialFilterBonusTest ); first.add( new MapBasedInputRow( - new DateTime("2013-01-05").getMillis(), + DateTimes.of("2013-01-05").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-05").toString(), + "timestamp", DateTimes.of("2013-01-05").toString(), "dim", "foo", "dim.geo", "_mmx.unknown", "val", 501L @@ -381,10 +382,10 @@ public class SpatialFilterBonusTest ); second.add( new MapBasedInputRow( - new DateTime("2013-01-04").getMillis(), + DateTimes.of("2013-01-04").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-04").toString(), + "timestamp", DateTimes.of("2013-01-04").toString(), "dim", "foo", "dim.geo", "7.0,3.0", "val", 91L @@ -393,10 +394,10 @@ public class SpatialFilterBonusTest ); second.add( new MapBasedInputRow( - new DateTime("2013-01-05").getMillis(), + DateTimes.of("2013-01-05").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-05").toString(), + "timestamp", DateTimes.of("2013-01-05").toString(), "dim", "foo", "dim.geo", "8.0,6.0", "val", 47L @@ -409,10 +410,10 @@ public class SpatialFilterBonusTest for (int i = 6; i < NUM_POINTS; i++) { third.add( new MapBasedInputRow( - new DateTime("2013-01-01").getMillis(), + DateTimes.of("2013-01-01").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-01").toString(), + "timestamp", DateTimes.of("2013-01-01").toString(), "dim", "boo", "dim.geo", StringUtils.format( "%s,%s", @@ -474,7 +475,7 @@ public class SpatialFilterBonusTest TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() .dataSource("test") .granularity(Granularities.ALL) - .intervals(Arrays.asList(new Interval("2013-01-01/2013-01-07"))) + .intervals(Arrays.asList(Intervals.of("2013-01-01/2013-01-07"))) .filters( new SpatialDimFilter( "dim.geo", @@ -491,7 +492,7 @@ public class SpatialFilterBonusTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2013-01-01T00:00:00.000Z"), + DateTimes.of("2013-01-01T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 3L) @@ -526,7 +527,7 @@ public class SpatialFilterBonusTest TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() .dataSource("test") .granularity(Granularities.DAY) - .intervals(Arrays.asList(new Interval("2013-01-01/2013-01-07"))) + .intervals(Arrays.asList(Intervals.of("2013-01-01/2013-01-07"))) .filters( new SpatialDimFilter( "dim.geo", @@ -543,7 +544,7 @@ public class SpatialFilterBonusTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2013-01-01T00:00:00.000Z"), + DateTimes.of("2013-01-01T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 1L) @@ -552,7 +553,7 @@ public class SpatialFilterBonusTest ) ), new Result( - new DateTime("2013-01-02T00:00:00.000Z"), + DateTimes.of("2013-01-02T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 1L) @@ -561,7 +562,7 @@ public class SpatialFilterBonusTest ) ), new Result( - new DateTime("2013-01-03T00:00:00.000Z"), + DateTimes.of("2013-01-03T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 1L) @@ -570,7 +571,7 @@ public class SpatialFilterBonusTest ) ), new Result( - new DateTime("2013-01-04T00:00:00.000Z"), + DateTimes.of("2013-01-04T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 1L) @@ -579,7 +580,7 @@ public class SpatialFilterBonusTest ) ), new Result( - new 
DateTime("2013-01-05T00:00:00.000Z"), + DateTimes.of("2013-01-05T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 1L) @@ -614,7 +615,7 @@ public class SpatialFilterBonusTest TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() .dataSource("test") .granularity(Granularities.DAY) - .intervals(Arrays.asList(new Interval("2013-01-01/2013-01-07"))) + .intervals(Arrays.asList(Intervals.of("2013-01-01/2013-01-07"))) .aggregators( Arrays.asList( new CountAggregatorFactory("rows"), @@ -632,7 +633,7 @@ public class SpatialFilterBonusTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2013-01-01T00:00:00.000Z"), + DateTimes.of("2013-01-01T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 4995L) @@ -642,7 +643,7 @@ public class SpatialFilterBonusTest ) ), new Result<>( - new DateTime("2013-01-02T00:00:00.000Z"), + DateTimes.of("2013-01-02T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 1L) @@ -652,7 +653,7 @@ public class SpatialFilterBonusTest ) ), new Result<>( - new DateTime("2013-01-03T00:00:00.000Z"), + DateTimes.of("2013-01-03T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 1L) @@ -662,7 +663,7 @@ public class SpatialFilterBonusTest ) ), new Result<>( - new DateTime("2013-01-04T00:00:00.000Z"), + DateTimes.of("2013-01-04T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 1L) @@ -672,7 +673,7 @@ public class SpatialFilterBonusTest ) ), new Result<>( - new DateTime("2013-01-05T00:00:00.000Z"), + DateTimes.of("2013-01-05T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 2L) diff --git a/processing/src/test/java/io/druid/segment/filter/SpatialFilterTest.java b/processing/src/test/java/io/druid/segment/filter/SpatialFilterTest.java index 365d7be7d1a..7e8b1871815 100644 --- a/processing/src/test/java/io/druid/segment/filter/SpatialFilterTest.java +++ b/processing/src/test/java/io/druid/segment/filter/SpatialFilterTest.java @@ -28,6 +28,8 @@ import io.druid.collections.spatial.search.RectangularBound; import io.druid.data.input.MapBasedInputRow; import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.SpatialDimensionSchema; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.granularity.Granularities; import io.druid.query.Druids; import io.druid.query.FinalizeResultsQueryRunner; @@ -54,7 +56,6 @@ import io.druid.segment.Segment; import io.druid.segment.TestHelper; import io.druid.segment.incremental.IncrementalIndex; import io.druid.segment.incremental.IncrementalIndexSchema; -import org.joda.time.DateTime; import org.joda.time.Interval; import org.junit.Test; import org.junit.runner.RunWith; @@ -76,7 +77,7 @@ public class SpatialFilterTest private static IndexIO INDEX_IO = TestHelper.getTestIndexIO(); public static final int NUM_POINTS = 5000; - private static Interval DATA_INTERVAL = new Interval("2013-01-01/2013-01-07"); + private static Interval DATA_INTERVAL = Intervals.of("2013-01-01/2013-01-07"); private static AggregatorFactory[] METRIC_AGGS = new AggregatorFactory[]{ new CountAggregatorFactory("rows"), @@ -139,10 +140,10 @@ public class SpatialFilterTest theIndex.add( new MapBasedInputRow( - new DateTime("2013-01-01").getMillis(), + DateTimes.of("2013-01-01").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-01").toString(), + "timestamp", 
DateTimes.of("2013-01-01").toString(), "dim", "foo", "lat", 0.0f, "long", 0.0f, @@ -152,10 +153,10 @@ public class SpatialFilterTest ); theIndex.add( new MapBasedInputRow( - new DateTime("2013-01-02").getMillis(), + DateTimes.of("2013-01-02").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-02").toString(), + "timestamp", DateTimes.of("2013-01-02").toString(), "dim", "foo", "lat", 1.0f, "long", 3.0f, @@ -165,10 +166,10 @@ public class SpatialFilterTest ); theIndex.add( new MapBasedInputRow( - new DateTime("2013-01-03").getMillis(), + DateTimes.of("2013-01-03").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-03").toString(), + "timestamp", DateTimes.of("2013-01-03").toString(), "dim", "foo", "lat", 4.0f, "long", 2.0f, @@ -178,10 +179,10 @@ public class SpatialFilterTest ); theIndex.add( new MapBasedInputRow( - new DateTime("2013-01-04").getMillis(), + DateTimes.of("2013-01-04").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-04").toString(), + "timestamp", DateTimes.of("2013-01-04").toString(), "dim", "foo", "lat", 7.0f, "long", 3.0f, @@ -191,10 +192,10 @@ public class SpatialFilterTest ); theIndex.add( new MapBasedInputRow( - new DateTime("2013-01-05").getMillis(), + DateTimes.of("2013-01-05").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-05").toString(), + "timestamp", DateTimes.of("2013-01-05").toString(), "dim", "foo", "lat", 8.0f, "long", 6.0f, @@ -204,10 +205,10 @@ public class SpatialFilterTest ); theIndex.add( new MapBasedInputRow( - new DateTime("2013-01-05").getMillis(), + DateTimes.of("2013-01-05").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-05").toString(), + "timestamp", DateTimes.of("2013-01-05").toString(), "dim", "foo", "lat", "_mmx.unknown", "long", "_mmx.unknown", @@ -217,10 +218,10 @@ public class SpatialFilterTest ); theIndex.add( new MapBasedInputRow( - new DateTime("2013-01-05").getMillis(), + DateTimes.of("2013-01-05").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-05").toString(), + "timestamp", DateTimes.of("2013-01-05").toString(), "dim", "foo", "dim.geo", "_mmx.unknown", "val", 501L @@ -229,10 +230,10 @@ public class SpatialFilterTest ); theIndex.add( new MapBasedInputRow( - new DateTime("2013-01-05").getMillis(), + DateTimes.of("2013-01-05").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-05").toString(), + "timestamp", DateTimes.of("2013-01-05").toString(), "lat2", 0.0f, "long2", 0.0f, "val", 13L @@ -245,10 +246,10 @@ public class SpatialFilterTest for (int i = 8; i < NUM_POINTS; i++) { theIndex.add( new MapBasedInputRow( - new DateTime("2013-01-01").getMillis(), + DateTimes.of("2013-01-01").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-01").toString(), + "timestamp", DateTimes.of("2013-01-01").toString(), "dim", "boo", "lat", (float) (rand.nextFloat() * 10 + 10.0), "long", (float) (rand.nextFloat() * 10 + 10.0), @@ -359,10 +360,10 @@ public class SpatialFilterTest first.add( new MapBasedInputRow( - new DateTime("2013-01-01").getMillis(), + DateTimes.of("2013-01-01").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-01").toString(), + "timestamp", DateTimes.of("2013-01-01").toString(), "dim", "foo", "lat", 0.0f, "long", 0.0f, @@ -372,10 +373,10 @@ public class SpatialFilterTest ); first.add( new MapBasedInputRow( - new DateTime("2013-01-02").getMillis(), + DateTimes.of("2013-01-02").getMillis(), DIMS, ImmutableMap.of( - "timestamp", 
new DateTime("2013-01-02").toString(), + "timestamp", DateTimes.of("2013-01-02").toString(), "dim", "foo", "lat", 1.0f, "long", 3.0f, @@ -385,10 +386,10 @@ public class SpatialFilterTest ); first.add( new MapBasedInputRow( - new DateTime("2013-01-03").getMillis(), + DateTimes.of("2013-01-03").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-03").toString(), + "timestamp", DateTimes.of("2013-01-03").toString(), "dim", "foo", "lat", 4.0f, "long", 2.0f, @@ -398,10 +399,10 @@ public class SpatialFilterTest ); first.add( new MapBasedInputRow( - new DateTime("2013-01-05").getMillis(), + DateTimes.of("2013-01-05").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-05").toString(), + "timestamp", DateTimes.of("2013-01-05").toString(), "dim", "foo", "lat", "_mmx.unknown", "long", "_mmx.unknown", @@ -411,10 +412,10 @@ public class SpatialFilterTest ); first.add( new MapBasedInputRow( - new DateTime("2013-01-05").getMillis(), + DateTimes.of("2013-01-05").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-05").toString(), + "timestamp", DateTimes.of("2013-01-05").toString(), "dim", "foo", "dim.geo", "_mmx.unknown", "val", 501L @@ -423,10 +424,10 @@ public class SpatialFilterTest ); second.add( new MapBasedInputRow( - new DateTime("2013-01-04").getMillis(), + DateTimes.of("2013-01-04").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-04").toString(), + "timestamp", DateTimes.of("2013-01-04").toString(), "dim", "foo", "lat", 7.0f, "long", 3.0f, @@ -436,10 +437,10 @@ public class SpatialFilterTest ); second.add( new MapBasedInputRow( - new DateTime("2013-01-05").getMillis(), + DateTimes.of("2013-01-05").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-05").toString(), + "timestamp", DateTimes.of("2013-01-05").toString(), "dim", "foo", "lat", 8.0f, "long", 6.0f, @@ -449,10 +450,10 @@ public class SpatialFilterTest ); second.add( new MapBasedInputRow( - new DateTime("2013-01-05").getMillis(), + DateTimes.of("2013-01-05").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-05").toString(), + "timestamp", DateTimes.of("2013-01-05").toString(), "lat2", 0.0f, "long2", 0.0f, "val", 13L @@ -465,10 +466,10 @@ public class SpatialFilterTest for (int i = 8; i < NUM_POINTS; i++) { third.add( new MapBasedInputRow( - new DateTime("2013-01-01").getMillis(), + DateTimes.of("2013-01-01").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-01").toString(), + "timestamp", DateTimes.of("2013-01-01").toString(), "dim", "boo", "lat", (float) (rand.nextFloat() * 10 + 10.0), "long", (float) (rand.nextFloat() * 10 + 10.0), @@ -530,7 +531,7 @@ public class SpatialFilterTest TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() .dataSource("test") .granularity(Granularities.ALL) - .intervals(Arrays.asList(new Interval("2013-01-01/2013-01-07"))) + .intervals(Arrays.asList(Intervals.of("2013-01-01/2013-01-07"))) .filters( new SpatialDimFilter( "dim.geo", @@ -547,7 +548,7 @@ public class SpatialFilterTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2013-01-01T00:00:00.000Z"), + DateTimes.of("2013-01-01T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 3L) @@ -583,7 +584,7 @@ public class SpatialFilterTest TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() .dataSource("test") .granularity(Granularities.ALL) - .intervals(Arrays.asList(new Interval("2013-01-01/2013-01-07"))) + 
.intervals(Arrays.asList(Intervals.of("2013-01-01/2013-01-07"))) .filters( new SpatialDimFilter( "spatialIsRad", @@ -600,7 +601,7 @@ public class SpatialFilterTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2013-01-01T00:00:00.000Z"), + DateTimes.of("2013-01-01T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 1L) @@ -635,7 +636,7 @@ public class SpatialFilterTest TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() .dataSource("test") .granularity(Granularities.DAY) - .intervals(Arrays.asList(new Interval("2013-01-01/2013-01-07"))) + .intervals(Arrays.asList(Intervals.of("2013-01-01/2013-01-07"))) .filters( new SpatialDimFilter( "dim.geo", @@ -652,7 +653,7 @@ public class SpatialFilterTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2013-01-01T00:00:00.000Z"), + DateTimes.of("2013-01-01T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 1L) @@ -661,7 +662,7 @@ public class SpatialFilterTest ) ), new Result( - new DateTime("2013-01-02T00:00:00.000Z"), + DateTimes.of("2013-01-02T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 1L) @@ -670,7 +671,7 @@ public class SpatialFilterTest ) ), new Result( - new DateTime("2013-01-03T00:00:00.000Z"), + DateTimes.of("2013-01-03T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 1L) @@ -679,7 +680,7 @@ public class SpatialFilterTest ) ), new Result( - new DateTime("2013-01-04T00:00:00.000Z"), + DateTimes.of("2013-01-04T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 1L) @@ -688,7 +689,7 @@ public class SpatialFilterTest ) ), new Result( - new DateTime("2013-01-05T00:00:00.000Z"), + DateTimes.of("2013-01-05T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 1L) diff --git a/processing/src/test/java/io/druid/segment/filter/TimeFilteringTest.java b/processing/src/test/java/io/druid/segment/filter/TimeFilteringTest.java index aa0a2d7e6a3..4c816704912 100644 --- a/processing/src/test/java/io/druid/segment/filter/TimeFilteringTest.java +++ b/processing/src/test/java/io/druid/segment/filter/TimeFilteringTest.java @@ -28,6 +28,8 @@ import io.druid.data.input.impl.InputRowParser; import io.druid.data.input.impl.MapInputRowParser; import io.druid.data.input.impl.TimeAndDimsParseSpec; import io.druid.data.input.impl.TimestampSpec; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.Pair; import io.druid.js.JavaScriptConfig; import io.druid.query.extraction.ExtractionFn; @@ -48,9 +50,7 @@ import io.druid.query.search.search.ContainsSearchQuerySpec; import io.druid.segment.IndexBuilder; import io.druid.segment.StorageAdapter; import io.druid.segment.column.Column; -import org.joda.time.DateTime; import org.joda.time.DateTimeZone; -import org.joda.time.Interval; import org.junit.AfterClass; import org.junit.Test; import org.junit.runner.RunWith; @@ -70,7 +70,7 @@ public class TimeFilteringTest extends BaseFilterTest private static final InputRowParser> PARSER = new MapInputRowParser( new TimeAndDimsParseSpec( - new TimestampSpec(TIMESTAMP_COLUMN, "millis", new DateTime("2000")), + new TimestampSpec(TIMESTAMP_COLUMN, "millis", DateTimes.of("2000")), new DimensionsSpec( DimensionsSpec.getDefaultSchemas(ImmutableList.of("dim0", "dim1", "dim2", "dim3")), null, @@ -238,7 +238,7 @@ public class TimeFilteringTest extends BaseFilterTest assertFilterMatches( new 
IntervalDimFilter( Column.TIME_COLUMN_NAME, - Arrays.asList(Interval.parse("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.005Z")), + Arrays.asList(Intervals.of("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.005Z")), null ), ImmutableList.of("1", "2", "3", "4") @@ -248,8 +248,8 @@ public class TimeFilteringTest extends BaseFilterTest new IntervalDimFilter( Column.TIME_COLUMN_NAME, Arrays.asList( - Interval.parse("1970-01-01T00:00:00.000Z/1970-01-01T00:00:00.003Z"), - Interval.parse("1970-01-01T00:00:00.004Z/1970-01-01T00:00:00.006Z") + Intervals.of("1970-01-01T00:00:00.000Z/1970-01-01T00:00:00.003Z"), + Intervals.of("1970-01-01T00:00:00.004Z/1970-01-01T00:00:00.006Z") ), null ), @@ -260,9 +260,9 @@ public class TimeFilteringTest extends BaseFilterTest new IntervalDimFilter( Column.TIME_COLUMN_NAME, Arrays.asList( - Interval.parse("1970-01-01T00:00:00.000Z/1970-01-01T00:00:00.001Z"), - Interval.parse("1970-01-01T00:00:00.003Z/1970-01-01T00:00:00.006Z"), - Interval.parse("1970-01-01T00:00:00.002Z/1970-01-01T00:00:00.005Z") + Intervals.of("1970-01-01T00:00:00.000Z/1970-01-01T00:00:00.001Z"), + Intervals.of("1970-01-01T00:00:00.003Z/1970-01-01T00:00:00.006Z"), + Intervals.of("1970-01-01T00:00:00.002Z/1970-01-01T00:00:00.005Z") ), null ), @@ -275,7 +275,7 @@ public class TimeFilteringTest extends BaseFilterTest assertFilterMatches( new IntervalDimFilter( Column.TIME_COLUMN_NAME, - Arrays.asList(Interval.parse("1970-01-01T02:00:00.001Z/1970-01-01T02:00:00.005Z")), + Arrays.asList(Intervals.of("1970-01-01T02:00:00.001Z/1970-01-01T02:00:00.005Z")), exFn ), ImmutableList.of("1", "2", "3", "4") @@ -288,7 +288,7 @@ public class TimeFilteringTest extends BaseFilterTest assertFilterMatches( new IntervalDimFilter( "dim0", - Arrays.asList(Interval.parse("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.005Z")), + Arrays.asList(Intervals.of("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.005Z")), null ), ImmutableList.of("1", "2", "3", "4") @@ -298,8 +298,8 @@ public class TimeFilteringTest extends BaseFilterTest new IntervalDimFilter( "dim0", Arrays.asList( - Interval.parse("1970-01-01T00:00:00.000Z/1970-01-01T00:00:00.003Z"), - Interval.parse("1970-01-01T00:00:00.004Z/1970-01-01T00:00:00.006Z") + Intervals.of("1970-01-01T00:00:00.000Z/1970-01-01T00:00:00.003Z"), + Intervals.of("1970-01-01T00:00:00.004Z/1970-01-01T00:00:00.006Z") ), null ), @@ -310,9 +310,9 @@ public class TimeFilteringTest extends BaseFilterTest new IntervalDimFilter( "dim0", Arrays.asList( - Interval.parse("1970-01-01T00:00:00.000Z/1970-01-01T00:00:00.001Z"), - Interval.parse("1970-01-01T00:00:00.003Z/1970-01-01T00:00:00.006Z"), - Interval.parse("1970-01-01T00:00:00.002Z/1970-01-01T00:00:00.005Z") + Intervals.of("1970-01-01T00:00:00.000Z/1970-01-01T00:00:00.001Z"), + Intervals.of("1970-01-01T00:00:00.003Z/1970-01-01T00:00:00.006Z"), + Intervals.of("1970-01-01T00:00:00.002Z/1970-01-01T00:00:00.005Z") ), null ), @@ -322,7 +322,7 @@ public class TimeFilteringTest extends BaseFilterTest assertFilterMatches( new IntervalDimFilter( "dim1", - Arrays.asList(Interval.parse("1970-01-01T00:00:00.002Z/1970-01-01T00:00:00.011Z")), + Arrays.asList(Intervals.of("1970-01-01T00:00:00.002Z/1970-01-01T00:00:00.011Z")), null ), ImmutableList.of("1", "2") @@ -334,7 +334,7 @@ public class TimeFilteringTest extends BaseFilterTest assertFilterMatches( new IntervalDimFilter( "dim0", - Arrays.asList(Interval.parse("1970-01-01T02:00:00.001Z/1970-01-01T02:00:00.005Z")), + Arrays.asList(Intervals.of("1970-01-01T02:00:00.001Z/1970-01-01T02:00:00.005Z")), exFn ), 
ImmutableList.of("1", "2", "3", "4") diff --git a/processing/src/test/java/io/druid/segment/incremental/IncrementalIndexStorageAdapterTest.java b/processing/src/test/java/io/druid/segment/incremental/IncrementalIndexStorageAdapterTest.java index 97e14777d31..8cc1375ddb0 100644 --- a/processing/src/test/java/io/druid/segment/incremental/IncrementalIndexStorageAdapterTest.java +++ b/processing/src/test/java/io/druid/segment/incremental/IncrementalIndexStorageAdapterTest.java @@ -29,6 +29,8 @@ import io.druid.collections.StupidPool; import io.druid.data.input.MapBasedInputRow; import io.druid.data.input.MapBasedRow; import io.druid.data.input.Row; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; @@ -114,14 +116,14 @@ public class IncrementalIndexStorageAdapterTest IncrementalIndex index = indexCreator.createIndex(); index.add( new MapBasedInputRow( - new DateTime().minus(1).getMillis(), + System.currentTimeMillis() - 1, Lists.newArrayList("billy"), ImmutableMap.of("billy", "hi") ) ); index.add( new MapBasedInputRow( - new DateTime().minus(1).getMillis(), + System.currentTimeMillis() - 1, Lists.newArrayList("sally"), ImmutableMap.of("sally", "bo") ) @@ -133,7 +135,7 @@ public class IncrementalIndexStorageAdapterTest GroupByQuery.builder() .setDataSource("test") .setGranularity(Granularities.ALL) - .setInterval(new Interval(0, new DateTime().getMillis())) + .setInterval(new Interval(DateTimes.EPOCH, DateTimes.nowUtc())) .addDimension("billy") .addDimension("sally") .addAggregator(new LongSumAggregatorFactory("cnt", "cnt")) @@ -158,14 +160,14 @@ public class IncrementalIndexStorageAdapterTest IncrementalIndex index = indexCreator.createIndex(); index.add( new MapBasedInputRow( - new DateTime("2014-09-01T00:00:00"), + DateTimes.of("2014-09-01T00:00:00"), Lists.newArrayList("billy"), ImmutableMap.of("billy", "hi") ) ); index.add( new MapBasedInputRow( - new DateTime("2014-09-01T01:00:00"), + DateTimes.of("2014-09-01T01:00:00"), Lists.newArrayList("billy", "sally"), ImmutableMap.of( "billy", "hip", @@ -180,7 +182,7 @@ public class IncrementalIndexStorageAdapterTest GroupByQuery.builder() .setDataSource("test") .setGranularity(Granularities.ALL) - .setInterval(new Interval(0, new DateTime().getMillis())) + .setInterval(new Interval(DateTimes.EPOCH, DateTimes.nowUtc())) .addDimension("billy") .addDimension("sally") .addAggregator( @@ -243,7 +245,7 @@ public class IncrementalIndexStorageAdapterTest { IncrementalIndex index = indexCreator.createIndex(); - DateTime t = DateTime.now(); + DateTime t = DateTimes.nowUtc(); Interval interval = new Interval(t.minusMinutes(1), t.plusMinutes(1)); index.add( @@ -299,7 +301,7 @@ public class IncrementalIndexStorageAdapterTest public void testSingleValueTopN() throws IOException { IncrementalIndex index = indexCreator.createIndex(); - DateTime t = DateTime.now(); + DateTime t = DateTimes.nowUtc(); index.add( new MapBasedInputRow( t.minus(1).getMillis(), @@ -326,7 +328,7 @@ public class IncrementalIndexStorageAdapterTest engine.query( new TopNQueryBuilder().dataSource("test") .granularity(Granularities.ALL) - .intervals(Lists.newArrayList(new Interval(0, new DateTime().getMillis()))) + .intervals(Lists.newArrayList(new Interval(DateTimes.EPOCH, DateTimes.nowUtc()))) .dimension("sally") .metric("cnt") .threshold(10) @@ -355,14 +357,14 @@ public class 
IncrementalIndexStorageAdapterTest IncrementalIndex index = indexCreator.createIndex(); index.add( new MapBasedInputRow( - new DateTime().minus(1).getMillis(), + System.currentTimeMillis() - 1, Lists.newArrayList("billy"), ImmutableMap.of("billy", "hi") ) ); index.add( new MapBasedInputRow( - new DateTime().minus(1).getMillis(), + System.currentTimeMillis() - 1, Lists.newArrayList("sally"), ImmutableMap.of("sally", "bo") ) @@ -374,7 +376,7 @@ public class IncrementalIndexStorageAdapterTest GroupByQuery.builder() .setDataSource("test") .setGranularity(Granularities.ALL) - .setInterval(new Interval(0, new DateTime().getMillis())) + .setInterval(new Interval(DateTimes.EPOCH, DateTimes.nowUtc())) .addDimension("billy") .addDimension("sally") .addAggregator(new LongSumAggregatorFactory("cnt", "cnt")) @@ -411,7 +413,7 @@ public class IncrementalIndexStorageAdapterTest Sequence cursors = sa.makeCursors( null, - new Interval(timestamp - 60_000, timestamp + 60_000), + Intervals.utc(timestamp - 60_000, timestamp + 60_000), VirtualColumns.EMPTY, Granularities.ALL, false, @@ -494,7 +496,7 @@ public class IncrementalIndexStorageAdapterTest Sequence cursors = sa.makeCursors( null, - new Interval(timestamp - 60_000, timestamp + 60_000), + Intervals.utc(timestamp - 60_000, timestamp + 60_000), VirtualColumns.EMPTY, Granularities.ALL, false, diff --git a/processing/src/test/java/io/druid/segment/incremental/IncrementalIndexTest.java b/processing/src/test/java/io/druid/segment/incremental/IncrementalIndexTest.java index aaee24f5353..e283efed090 100644 --- a/processing/src/test/java/io/druid/segment/incremental/IncrementalIndexTest.java +++ b/processing/src/test/java/io/druid/segment/incremental/IncrementalIndexTest.java @@ -37,7 +37,6 @@ import io.druid.query.aggregation.CountAggregatorFactory; import io.druid.query.aggregation.FilteredAggregatorFactory; import io.druid.query.filter.SelectorDimFilter; import io.druid.segment.CloserRule; -import org.joda.time.DateTime; import org.junit.Assert; import org.junit.Rule; import org.junit.Test; @@ -152,14 +151,14 @@ public class IncrementalIndexTest IncrementalIndex index = closer.closeLater(indexCreator.createIndex()); index.add( new MapBasedInputRow( - new DateTime().minus(1).getMillis(), + System.currentTimeMillis() - 1, Lists.newArrayList("billy", "joe"), ImmutableMap.of("billy", "A", "joe", "B") ) ); index.add( new MapBasedInputRow( - new DateTime().minus(1).getMillis(), + System.currentTimeMillis() - 1, Lists.newArrayList("billy", "joe", "joe"), ImmutableMap.of("billy", "A", "joe", "B") ) @@ -172,7 +171,7 @@ public class IncrementalIndexTest IncrementalIndex index = closer.closeLater(indexCreator.createIndex()); index.add( new MapBasedInputRow( - new DateTime().minus(1).getMillis(), + System.currentTimeMillis() - 1, Lists.newArrayList("billy", "joe", "joe"), ImmutableMap.of("billy", "A", "joe", "B") ) @@ -185,21 +184,21 @@ public class IncrementalIndexTest IncrementalIndex index = closer.closeLater(indexCreator.createIndex()); index.add( new MapBasedInputRow( - new DateTime().minus(1).getMillis(), + System.currentTimeMillis() - 1, Lists.newArrayList("billy", "joe"), ImmutableMap.of("billy", "A", "joe", "B") ) ); index.add( new MapBasedInputRow( - new DateTime().minus(1).getMillis(), + System.currentTimeMillis() - 1, Lists.newArrayList("billy", "joe"), ImmutableMap.of("billy", "C", "joe", "B") ) ); index.add( new MapBasedInputRow( - new DateTime().minus(1).getMillis(), + System.currentTimeMillis() - 1, Lists.newArrayList("billy", "joe"), 
ImmutableMap.of("billy", "A", "joe", "B") ) @@ -212,7 +211,7 @@ public class IncrementalIndexTest IncrementalIndex index = closer.closeLater(indexCreator.createIndex()); index.add( new MapBasedInputRow( - new DateTime().minus(1).getMillis(), + System.currentTimeMillis() - 1, Lists.newArrayList("string", "float", "long", "double"), ImmutableMap.of( "string", Arrays.asList("A", null, ""), @@ -235,7 +234,7 @@ public class IncrementalIndexTest public void sameRow() throws IndexSizeExceededException { MapBasedInputRow row = new MapBasedInputRow( - new DateTime().minus(1).getMillis(), + System.currentTimeMillis() - 1, Lists.newArrayList("billy", "joe"), ImmutableMap.of("billy", "A", "joe", "B") ); diff --git a/processing/src/test/java/io/druid/segment/incremental/OnheapIncrementalIndexBenchmark.java b/processing/src/test/java/io/druid/segment/incremental/OnheapIncrementalIndexBenchmark.java index 446d5a2089f..f6ecc26c13c 100644 --- a/processing/src/test/java/io/druid/segment/incremental/OnheapIncrementalIndexBenchmark.java +++ b/processing/src/test/java/io/druid/segment/incremental/OnheapIncrementalIndexBenchmark.java @@ -33,6 +33,7 @@ import com.google.common.util.concurrent.MoreExecutors; import com.google.common.util.concurrent.ThreadFactoryBuilder; import io.druid.data.input.InputRow; import io.druid.data.input.MapBasedInputRow; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.granularity.Granularity; @@ -338,7 +339,7 @@ public class OnheapIncrementalIndexBenchmark extends AbstractBenchmark ) ); final long timestamp = System.currentTimeMillis(); - final Interval queryInterval = new Interval("1900-01-01T00:00:00Z/2900-01-01T00:00:00Z"); + final Interval queryInterval = Intervals.of("1900-01-01T00:00:00Z/2900-01-01T00:00:00Z"); final List> indexFutures = new LinkedList<>(); final List> queryFutures = new LinkedList<>(); final Segment incrementalIndexSegment = new IncrementalIndexSegment(incrementalIndex, null); diff --git a/server/src/main/java/io/druid/client/CachingClusteredClient.java b/server/src/main/java/io/druid/client/CachingClusteredClient.java index 8f845acd1a5..e843963e00c 100644 --- a/server/src/main/java/io/druid/client/CachingClusteredClient.java +++ b/server/src/main/java/io/druid/client/CachingClusteredClient.java @@ -48,6 +48,7 @@ import io.druid.client.selector.ServerSelector; import io.druid.concurrent.Execs; import io.druid.guice.annotations.BackgroundCaching; import io.druid.guice.annotations.Smile; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.Pair; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.guava.BaseSequence; @@ -329,7 +330,7 @@ public class CachingClusteredClient implements QuerySegmentWalker long intervalStart = holderInterval.getStartMillis(); if (!uncoveredIntervalsOverflowed && startMillis != intervalStart) { if (uncoveredIntervalsLimit > uncoveredIntervals.size()) { - uncoveredIntervals.add(new Interval(startMillis, intervalStart)); + uncoveredIntervals.add(Intervals.utc(startMillis, intervalStart)); } else { uncoveredIntervalsOverflowed = true; } @@ -339,7 +340,7 @@ public class CachingClusteredClient implements QuerySegmentWalker if (!uncoveredIntervalsOverflowed && startMillis < endMillis) { if (uncoveredIntervalsLimit > uncoveredIntervals.size()) { - uncoveredIntervals.add(new Interval(startMillis, endMillis)); + 
uncoveredIntervals.add(Intervals.utc(startMillis, endMillis)); } else { uncoveredIntervalsOverflowed = true; } diff --git a/server/src/main/java/io/druid/curator/discovery/CuratorDruidNodeDiscoveryProvider.java b/server/src/main/java/io/druid/curator/discovery/CuratorDruidNodeDiscoveryProvider.java index 5996f500adb..a3eaee13323 100644 --- a/server/src/main/java/io/druid/curator/discovery/CuratorDruidNodeDiscoveryProvider.java +++ b/server/src/main/java/io/druid/curator/discovery/CuratorDruidNodeDiscoveryProvider.java @@ -31,6 +31,7 @@ import io.druid.discovery.DruidNodeDiscoveryProvider; import io.druid.guice.ManageLifecycle; import io.druid.guice.annotations.Json; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.lifecycle.LifecycleStart; import io.druid.java.util.common.lifecycle.LifecycleStop; import io.druid.java.util.common.logger.Logger; @@ -180,7 +181,7 @@ public class CuratorDruidNodeDiscoveryProvider extends DruidNodeDiscoveryProvide this.jsonMapper = jsonMapper; // This is required to be single threaded from Docs in PathChildrenCache; - this.cacheExecutor = Execs.singleThreaded(String.format("NodeTypeWatcher[%s]", nodeType)); + this.cacheExecutor = Execs.singleThreaded(StringUtils.format("NodeTypeWatcher[%s]", nodeType)); this.cache = new PathChildrenCache( curatorFramework, ZKPaths.makePath(basePath, nodeType), diff --git a/server/src/main/java/io/druid/indexer/SQLMetadataStorageUpdaterJobHandler.java b/server/src/main/java/io/druid/indexer/SQLMetadataStorageUpdaterJobHandler.java index 034bd6ad2ab..6f1a140ded1 100644 --- a/server/src/main/java/io/druid/indexer/SQLMetadataStorageUpdaterJobHandler.java +++ b/server/src/main/java/io/druid/indexer/SQLMetadataStorageUpdaterJobHandler.java @@ -22,13 +22,12 @@ package io.druid.indexer; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableMap; import com.google.inject.Inject; - +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.logger.Logger; import io.druid.metadata.SQLMetadataConnector; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; -import org.joda.time.DateTime; import org.skife.jdbi.v2.Handle; import org.skife.jdbi.v2.IDBI; import org.skife.jdbi.v2.PreparedBatch; @@ -71,7 +70,7 @@ public class SQLMetadataStorageUpdaterJobHandler implements MetadataStorageUpdat new ImmutableMap.Builder() .put("id", segment.getIdentifier()) .put("dataSource", segment.getDataSource()) - .put("created_date", new DateTime().toString()) + .put("created_date", DateTimes.nowUtc().toString()) .put("start", segment.getInterval().getStart().toString()) .put("end", segment.getInterval().getEnd().toString()) .put("partitioned", (segment.getShardSpec() instanceof NoneShardSpec) ? 
false : true) diff --git a/server/src/main/java/io/druid/metadata/IndexerSQLMetadataStorageCoordinator.java b/server/src/main/java/io/druid/metadata/IndexerSQLMetadataStorageCoordinator.java index c1750101c1e..53035f94120 100644 --- a/server/src/main/java/io/druid/metadata/IndexerSQLMetadataStorageCoordinator.java +++ b/server/src/main/java/io/druid/metadata/IndexerSQLMetadataStorageCoordinator.java @@ -32,12 +32,13 @@ import com.google.common.collect.Sets; import com.google.common.hash.Hashing; import com.google.common.io.BaseEncoding; import com.google.inject.Inject; -import io.druid.common.utils.JodaUtils; import io.druid.indexing.overlord.DataSourceMetadata; import io.druid.indexing.overlord.IndexerMetadataStorageCoordinator; import io.druid.indexing.overlord.SegmentPublishResult; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.IAE; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.lifecycle.LifecycleStart; import io.druid.java.util.common.logger.Logger; @@ -49,7 +50,6 @@ import io.druid.timeline.partition.LinearShardSpec; import io.druid.timeline.partition.NoneShardSpec; import io.druid.timeline.partition.NumberedShardSpec; import io.druid.timeline.partition.PartitionChunk; -import org.joda.time.DateTime; import org.joda.time.Interval; import org.skife.jdbi.v2.FoldController; import org.skife.jdbi.v2.Folder3; @@ -315,8 +315,9 @@ public class IndexerSQLMetadataStorageCoordinator implements IndexerMetadataStor // Find which segments are used (i.e. not overshadowed). final Set usedSegments = Sets.newHashSet(); - for (TimelineObjectHolder holder : VersionedIntervalTimeline.forSegments(segments) - .lookupWithIncompletePartitions(JodaUtils.ETERNITY)) { + List> segmentHolders = + VersionedIntervalTimeline.forSegments(segments).lookupWithIncompletePartitions(Intervals.ETERNITY); + for (TimelineObjectHolder holder : segmentHolders) { for (PartitionChunk chunk : holder.getObject()) { usedSegments.add(chunk.getObject()); } @@ -568,7 +569,7 @@ public class IndexerSQLMetadataStorageCoordinator implements IndexerMetadataStor ) .bind("id", newIdentifier.getIdentifierAsString()) .bind("dataSource", dataSource) - .bind("created_date", new DateTime().toString()) + .bind("created_date", DateTimes.nowUtc().toString()) .bind("start", interval.getStart().toString()) .bind("end", interval.getEnd().toString()) .bind("sequence_name", sequenceName) @@ -622,7 +623,7 @@ public class IndexerSQLMetadataStorageCoordinator implements IndexerMetadataStor ) .bind("id", segment.getIdentifier()) .bind("dataSource", segment.getDataSource()) - .bind("created_date", new DateTime().toString()) + .bind("created_date", DateTimes.nowUtc().toString()) .bind("start", segment.getInterval().getStart().toString()) .bind("end", segment.getInterval().getEnd().toString()) .bind("partitioned", (segment.getShardSpec() instanceof NoneShardSpec) ? 
false : true) @@ -765,7 +766,7 @@ public class IndexerSQLMetadataStorageCoordinator implements IndexerMetadataStor ) ) .bind("dataSource", dataSource) - .bind("created_date", new DateTime().toString()) + .bind("created_date", DateTimes.nowUtc().toString()) .bind("commit_metadata_payload", newCommitMetadataBytes) .bind("commit_metadata_sha1", newCommitMetadataSha1) .execute(); diff --git a/server/src/main/java/io/druid/metadata/SQLMetadataRuleManager.java b/server/src/main/java/io/druid/metadata/SQLMetadataRuleManager.java index a3fa59e0def..7d3f972d8bb 100644 --- a/server/src/main/java/io/druid/metadata/SQLMetadataRuleManager.java +++ b/server/src/main/java/io/druid/metadata/SQLMetadataRuleManager.java @@ -39,6 +39,7 @@ import io.druid.client.DruidServer; import io.druid.concurrent.Execs; import io.druid.guice.ManageLifecycle; import io.druid.guice.annotations.Json; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.Pair; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.lifecycle.LifecycleStart; @@ -108,7 +109,7 @@ public class SQLMetadataRuleManager implements MetadataRuleManager ) ) ); - final String version = new DateTime().toString(); + final String version = DateTimes.nowUtc().toString(); handle.createStatement( StringUtils.format( "INSERT INTO %s (id, dataSource, version, payload) VALUES (:id, :dataSource, :version, :payload)", @@ -365,7 +366,7 @@ public class SQLMetadataRuleManager implements MetadataRuleManager @Override public Void inTransaction(Handle handle, TransactionStatus transactionStatus) throws Exception { - final DateTime auditTime = DateTime.now(); + final DateTime auditTime = DateTimes.nowUtc(); auditManager.doAudit( AuditEntry.builder() .key(dataSource) diff --git a/server/src/main/java/io/druid/metadata/SQLMetadataSegmentManager.java b/server/src/main/java/io/druid/metadata/SQLMetadataSegmentManager.java index 0c0255384ef..7308e2a46cd 100644 --- a/server/src/main/java/io/druid/metadata/SQLMetadataSegmentManager.java +++ b/server/src/main/java/io/druid/metadata/SQLMetadataSegmentManager.java @@ -37,6 +37,8 @@ import com.metamx.emitter.EmittingLogger; import io.druid.client.DruidDataSource; import io.druid.concurrent.Execs; import io.druid.guice.ManageLifecycle; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.MapUtils; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.lifecycle.LifecycleStart; @@ -223,11 +225,10 @@ public class SQLMetadataSegmentManager implements MetadataSegmentManager ); final List segments = Lists.newArrayList(); - for (TimelineObjectHolder objectHolder : segmentTimeline.lookup( - new Interval( - "0000-01-01/3000-01-01" - ) - )) { + List> timelineObjectHolders = segmentTimeline.lookup( + Intervals.of("0000-01-01/3000-01-01") + ); + for (TimelineObjectHolder objectHolder : timelineObjectHolders) { for (PartitionChunk partitionChunk : objectHolder.getObject()) { segments.add(partitionChunk.getObject()); } @@ -504,7 +505,7 @@ public class SQLMetadataSegmentManager implements MetadataSegmentManager if (dataSource == null) { dataSource = new DruidDataSource( datasourceName, - ImmutableMap.of("created", new DateTime().toString()) + ImmutableMap.of("created", DateTimes.nowUtc().toString()) ); Object shouldBeNull = newDataSources.put( diff --git a/server/src/main/java/io/druid/metadata/SQLMetadataSegmentPublisher.java b/server/src/main/java/io/druid/metadata/SQLMetadataSegmentPublisher.java index 
4f95a2ad898..6bee04c6468 100644 --- a/server/src/main/java/io/druid/metadata/SQLMetadataSegmentPublisher.java +++ b/server/src/main/java/io/druid/metadata/SQLMetadataSegmentPublisher.java @@ -22,12 +22,11 @@ package io.druid.metadata; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.annotations.VisibleForTesting; import com.google.inject.Inject; - +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.logger.Logger; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; -import org.joda.time.DateTime; import org.skife.jdbi.v2.DBI; import org.skife.jdbi.v2.Handle; import org.skife.jdbi.v2.tweak.HandleCallback; @@ -68,7 +67,7 @@ public class SQLMetadataSegmentPublisher implements MetadataSegmentPublisher publishSegment( segment.getIdentifier(), segment.getDataSource(), - new DateTime().toString(), + DateTimes.nowUtc().toString(), segment.getInterval().getStart().toString(), segment.getInterval().getEnd().toString(), (segment.getShardSpec() instanceof NoneShardSpec) ? false : true, diff --git a/server/src/main/java/io/druid/metadata/SQLMetadataSupervisorManager.java b/server/src/main/java/io/druid/metadata/SQLMetadataSupervisorManager.java index 6d65fbf9ca5..c7436e7961b 100644 --- a/server/src/main/java/io/druid/metadata/SQLMetadataSupervisorManager.java +++ b/server/src/main/java/io/druid/metadata/SQLMetadataSupervisorManager.java @@ -27,16 +27,14 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.inject.Inject; - import io.druid.guice.ManageLifecycle; import io.druid.guice.annotations.Json; import io.druid.indexing.overlord.supervisor.SupervisorSpec; import io.druid.indexing.overlord.supervisor.VersionedSupervisorSpec; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.Pair; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.lifecycle.LifecycleStart; - -import org.joda.time.DateTime; import org.skife.jdbi.v2.FoldController; import org.skife.jdbi.v2.Folder3; import org.skife.jdbi.v2.Handle; @@ -95,7 +93,7 @@ public class SQLMetadataSupervisorManager implements MetadataSupervisorManager ) ) .bind("spec_id", id) - .bind("created_date", new DateTime().toString()) + .bind("created_date", DateTimes.nowUtc().toString()) .bind("payload", jsonMapper.writeValueAsBytes(spec)) .execute(); diff --git a/server/src/main/java/io/druid/segment/indexing/granularity/ArbitraryGranularitySpec.java b/server/src/main/java/io/druid/segment/indexing/granularity/ArbitraryGranularitySpec.java index 118b53221fe..398511460d8 100644 --- a/server/src/main/java/io/druid/segment/indexing/granularity/ArbitraryGranularitySpec.java +++ b/server/src/main/java/io/druid/segment/indexing/granularity/ArbitraryGranularitySpec.java @@ -27,7 +27,7 @@ import com.google.common.collect.Iterators; import com.google.common.collect.Lists; import com.google.common.collect.PeekingIterator; import com.google.common.collect.Sets; -import io.druid.common.utils.JodaUtils; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.IAE; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.granularity.Granularity; @@ -104,7 +104,7 @@ public class ArbitraryGranularitySpec implements GranularitySpec public Optional bucketInterval(DateTime dt) { // First interval with start time ≤ dt - final Interval interval = 
intervals.floor(new Interval(dt, new DateTime(JodaUtils.MAX_INSTANT))); + final Interval interval = intervals.floor(new Interval(dt, DateTimes.MAX)); if (interval != null && interval.contains(dt)) { return Optional.of(interval); diff --git a/server/src/main/java/io/druid/segment/realtime/appenderator/AppenderatorImpl.java b/server/src/main/java/io/druid/segment/realtime/appenderator/AppenderatorImpl.java index a2492e483f5..fe3c4886f5f 100644 --- a/server/src/main/java/io/druid/segment/realtime/appenderator/AppenderatorImpl.java +++ b/server/src/main/java/io/druid/segment/realtime/appenderator/AppenderatorImpl.java @@ -45,6 +45,7 @@ import io.druid.common.guava.ThreadRenamingCallable; import io.druid.concurrent.Execs; import io.druid.data.input.Committer; import io.druid.data.input.InputRow; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.IAE; import io.druid.java.util.common.ISE; import io.druid.java.util.common.Pair; @@ -71,7 +72,6 @@ import io.druid.server.coordination.DataSegmentAnnouncer; import io.druid.timeline.DataSegment; import io.druid.timeline.VersionedIntervalTimeline; import org.apache.commons.io.FileUtils; -import org.joda.time.DateTime; import org.joda.time.Interval; import javax.annotation.Nullable; @@ -716,7 +716,7 @@ public class AppenderatorImpl implements Appenderator private void resetNextFlush() { - nextFlush = new DateTime().plus(tuningConfig.getIntermediatePersistPeriod()).getMillis(); + nextFlush = DateTimes.nowUtc().plus(tuningConfig.getIntermediatePersistPeriod()).getMillis(); } /** diff --git a/server/src/main/java/io/druid/segment/realtime/appenderator/AppenderatorPlumber.java b/server/src/main/java/io/druid/segment/realtime/appenderator/AppenderatorPlumber.java index 678fb2e7761..c3e8678ba30 100644 --- a/server/src/main/java/io/druid/segment/realtime/appenderator/AppenderatorPlumber.java +++ b/server/src/main/java/io/druid/segment/realtime/appenderator/AppenderatorPlumber.java @@ -35,6 +35,7 @@ import io.druid.common.guava.ThreadRenamingCallable; import io.druid.concurrent.Execs; import io.druid.data.input.Committer; import io.druid.data.input.InputRow; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.ISE; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.concurrent.ScheduledExecutors; @@ -243,14 +244,15 @@ public class AppenderatorPlumber implements Plumber final Granularity segmentGranularity = schema.getGranularitySpec().getSegmentGranularity(); final VersioningPolicy versioningPolicy = config.getVersioningPolicy(); - final long truncatedTime = segmentGranularity.bucketStart(new DateTime(timestamp)).getMillis(); + DateTime truncatedDateTime = segmentGranularity.bucketStart(DateTimes.utc(timestamp)); + final long truncatedTime = truncatedDateTime.getMillis(); SegmentIdentifier retVal = segments.get(truncatedTime); if (retVal == null) { final Interval interval = new Interval( - new DateTime(truncatedTime), - segmentGranularity.increment(new DateTime(truncatedTime)) + truncatedDateTime, + segmentGranularity.increment(truncatedDateTime) ); retVal = new SegmentIdentifier( @@ -335,12 +337,12 @@ public class AppenderatorPlumber implements Plumber final Granularity segmentGranularity = schema.getGranularitySpec().getSegmentGranularity(); final Period windowPeriod = config.getWindowPeriod(); - final DateTime truncatedNow = segmentGranularity.bucketStart(new DateTime()); + final DateTime truncatedNow = segmentGranularity.bucketStart(DateTimes.nowUtc()); final long windowMillis = 
windowPeriod.toStandardDuration().getMillis(); log.info( "Expect to run at [%s]", - new DateTime().plus( + DateTimes.nowUtc().plus( new Duration( System.currentTimeMillis(), segmentGranularity.increment(truncatedNow).getMillis() + windowMillis @@ -393,14 +395,7 @@ public class AppenderatorPlumber implements Plumber final long windowMillis = windowPeriod.toStandardDuration().getMillis(); log.info("Starting merge and push."); DateTime minTimestampAsDate = segmentGranularity.bucketStart( - new DateTime( - Math.max( - windowMillis, - rejectionPolicy.getCurrMaxTime() - .getMillis() - ) - - windowMillis - ) + DateTimes.utc(Math.max(windowMillis, rejectionPolicy.getCurrMaxTime().getMillis()) - windowMillis) ); long minTimestamp = minTimestampAsDate.getMillis(); @@ -426,7 +421,7 @@ public class AppenderatorPlumber implements Plumber log.info( "Skipping persist and merge for entry [%s] : Start time [%s] >= [%s] min timestamp required in this run. Segment will be picked up in a future run.", segment, - new DateTime(intervalStart), + DateTimes.utc(intervalStart), minTimestampAsDate ); } diff --git a/server/src/main/java/io/druid/segment/realtime/firehose/EventReceiverFirehoseFactory.java b/server/src/main/java/io/druid/segment/realtime/firehose/EventReceiverFirehoseFactory.java index 6e462f6b309..8b280dd4ec4 100644 --- a/server/src/main/java/io/druid/segment/realtime/firehose/EventReceiverFirehoseFactory.java +++ b/server/src/main/java/io/druid/segment/realtime/firehose/EventReceiverFirehoseFactory.java @@ -41,6 +41,7 @@ import io.druid.data.input.InputRow; import io.druid.data.input.impl.MapInputRowParser; import io.druid.guice.annotations.Json; import io.druid.guice.annotations.Smile; +import io.druid.java.util.common.DateTimes; import io.druid.server.metrics.EventReceiverFirehoseMetric; import io.druid.server.metrics.EventReceiverFirehoseRegister; import org.joda.time.DateTime; @@ -323,7 +324,7 @@ public class EventReceiverFirehoseFactory implements FirehoseFactory theEvent = Maps.newLinkedHashMap(); final long timestamp = timestampColumnSelector.getLong(); - theEvent.put(EventHolder.timestampKey, new DateTime(timestamp)); + theEvent.put(EventHolder.timestampKey, DateTimes.utc(timestamp)); for (Map.Entry dimSelector : dimSelectors.entrySet()) { final String dim = dimSelector.getKey(); diff --git a/server/src/main/java/io/druid/segment/realtime/firehose/IrcFirehoseFactory.java b/server/src/main/java/io/druid/segment/realtime/firehose/IrcFirehoseFactory.java index 099b7c12a3e..d1fd632672c 100644 --- a/server/src/main/java/io/druid/segment/realtime/firehose/IrcFirehoseFactory.java +++ b/server/src/main/java/io/druid/segment/realtime/firehose/IrcFirehoseFactory.java @@ -33,6 +33,7 @@ import com.ircclouds.irc.api.state.IIRCState; import io.druid.data.input.Firehose; import io.druid.data.input.FirehoseFactory; import io.druid.data.input.InputRow; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.Pair; import io.druid.java.util.common.logger.Logger; import org.joda.time.DateTime; @@ -112,7 +113,7 @@ public class IrcFirehoseFactory implements FirehoseFactory public void onChannelMessage(ChannelPrivMsg aMsg) { try { - queue.put(Pair.of(DateTime.now(), aMsg)); + queue.put(Pair.of(DateTimes.nowUtc(), aMsg)); } catch (InterruptedException e) { throw new RuntimeException("interrupted adding message to queue", e); diff --git a/server/src/main/java/io/druid/segment/realtime/plumber/CustomVersioningPolicy.java 
b/server/src/main/java/io/druid/segment/realtime/plumber/CustomVersioningPolicy.java index 9a1de9d331b..331b5ff34a5 100644 --- a/server/src/main/java/io/druid/segment/realtime/plumber/CustomVersioningPolicy.java +++ b/server/src/main/java/io/druid/segment/realtime/plumber/CustomVersioningPolicy.java @@ -21,7 +21,7 @@ package io.druid.segment.realtime.plumber; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; -import org.joda.time.DateTime; +import io.druid.java.util.common.DateTimes; import org.joda.time.Interval; /** @@ -35,7 +35,7 @@ public class CustomVersioningPolicy implements VersioningPolicy @JsonProperty("version") String version ) { - this.version = version == null ? new DateTime().toString() : version; + this.version = version == null ? DateTimes.nowUtc().toString() : version; } @Override diff --git a/server/src/main/java/io/druid/segment/realtime/plumber/FlushingPlumber.java b/server/src/main/java/io/druid/segment/realtime/plumber/FlushingPlumber.java index 0294103b132..3938d017a92 100644 --- a/server/src/main/java/io/druid/segment/realtime/plumber/FlushingPlumber.java +++ b/server/src/main/java/io/druid/segment/realtime/plumber/FlushingPlumber.java @@ -27,6 +27,7 @@ import io.druid.client.cache.Cache; import io.druid.client.cache.CacheConfig; import io.druid.common.guava.ThreadRenamingCallable; import io.druid.concurrent.Execs; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.Granularity; import io.druid.java.util.common.concurrent.ScheduledExecutors; @@ -121,7 +122,7 @@ public class FlushingPlumber extends RealtimePlumber log.info( "Abandoning segment %s at %s", sink.getSegment().getIdentifier(), - new DateTime().plusMillis((int) flushDuration.getMillis()) + DateTimes.nowUtc().plusMillis((int) flushDuration.getMillis()) ); ScheduledExecutors.scheduleWithFixedDelay( @@ -143,12 +144,12 @@ public class FlushingPlumber extends RealtimePlumber private void startFlushThread() { final Granularity segmentGranularity = schema.getGranularitySpec().getSegmentGranularity(); - final DateTime truncatedNow = segmentGranularity.bucketStart(new DateTime()); + final DateTime truncatedNow = segmentGranularity.bucketStart(DateTimes.nowUtc()); final long windowMillis = config.getWindowPeriod().toStandardDuration().getMillis(); log.info( "Expect to run at [%s]", - new DateTime().plus( + DateTimes.nowUtc().plus( new Duration( System.currentTimeMillis(), schema.getGranularitySpec().getSegmentGranularity().increment(truncatedNow).getMillis() + windowMillis diff --git a/server/src/main/java/io/druid/segment/realtime/plumber/MessageTimeRejectionPolicyFactory.java b/server/src/main/java/io/druid/segment/realtime/plumber/MessageTimeRejectionPolicyFactory.java index 1773abdf695..083f6384cd1 100644 --- a/server/src/main/java/io/druid/segment/realtime/plumber/MessageTimeRejectionPolicyFactory.java +++ b/server/src/main/java/io/druid/segment/realtime/plumber/MessageTimeRejectionPolicyFactory.java @@ -19,7 +19,8 @@ package io.druid.segment.realtime.plumber; -import io.druid.common.utils.JodaUtils; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.JodaUtils; import io.druid.java.util.common.StringUtils; import org.joda.time.DateTime; import org.joda.time.Period; @@ -53,7 +54,7 @@ public class MessageTimeRejectionPolicyFactory implements RejectionPolicyFactory @Override public DateTime getCurrMaxTime() { - return new DateTime(maxTimestamp); + 
return DateTimes.utc(maxTimestamp); } @Override diff --git a/server/src/main/java/io/druid/segment/realtime/plumber/NoopRejectionPolicyFactory.java b/server/src/main/java/io/druid/segment/realtime/plumber/NoopRejectionPolicyFactory.java index de572665a09..678516b7585 100644 --- a/server/src/main/java/io/druid/segment/realtime/plumber/NoopRejectionPolicyFactory.java +++ b/server/src/main/java/io/druid/segment/realtime/plumber/NoopRejectionPolicyFactory.java @@ -19,6 +19,7 @@ package io.druid.segment.realtime.plumber; +import io.druid.java.util.common.DateTimes; import org.joda.time.DateTime; import org.joda.time.Period; @@ -32,7 +33,7 @@ public class NoopRejectionPolicyFactory implements RejectionPolicyFactory @Override public DateTime getCurrMaxTime() { - return new DateTime(0); + return DateTimes.EPOCH; } @Override diff --git a/server/src/main/java/io/druid/segment/realtime/plumber/RealtimePlumber.java b/server/src/main/java/io/druid/segment/realtime/plumber/RealtimePlumber.java index 538440b2d30..3fe234db39a 100644 --- a/server/src/main/java/io/druid/segment/realtime/plumber/RealtimePlumber.java +++ b/server/src/main/java/io/druid/segment/realtime/plumber/RealtimePlumber.java @@ -42,7 +42,9 @@ import io.druid.concurrent.Execs; import io.druid.concurrent.TaskThreadPriority; import io.druid.data.input.Committer; import io.druid.data.input.InputRow; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.Pair; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.concurrent.ScheduledExecutors; @@ -230,14 +232,15 @@ public class RealtimePlumber implements Plumber final Granularity segmentGranularity = schema.getGranularitySpec().getSegmentGranularity(); final VersioningPolicy versioningPolicy = config.getVersioningPolicy(); - final long truncatedTime = segmentGranularity.bucketStart(new DateTime(timestamp)).getMillis(); + DateTime truncatedDateTime = segmentGranularity.bucketStart(DateTimes.utc(timestamp)); + final long truncatedTime = truncatedDateTime.getMillis(); Sink retVal = sinks.get(truncatedTime); if (retVal == null) { final Interval sinkInterval = new Interval( - new DateTime(truncatedTime), - segmentGranularity.increment(new DateTime(truncatedTime)) + truncatedDateTime, + segmentGranularity.increment(truncatedDateTime) ); retVal = new Sink( @@ -354,7 +357,7 @@ public class RealtimePlumber implements Plumber private void persistAndMerge(final long truncatedTime, final Sink sink) { final String threadName = StringUtils.format( - "%s-%s-persist-n-merge", schema.getDataSource(), new DateTime(truncatedTime) + "%s-%s-persist-n-merge", schema.getDataSource(), DateTimes.utc(truncatedTime) ); mergeExecutor.execute( new ThreadRenamingRunnable(threadName) @@ -542,7 +545,7 @@ public class RealtimePlumber implements Plumber private void resetNextFlush() { - nextFlush = new DateTime().plus(config.getIntermediatePersistPeriod()).getMillis(); + nextFlush = DateTimes.nowUtc().plus(config.getIntermediatePersistPeriod()).getMillis(); } protected void initializeExecutors() @@ -598,7 +601,7 @@ public class RealtimePlumber implements Plumber Object metadata = null; long latestCommitTime = 0; for (File sinkDir : files) { - final Interval sinkInterval = new Interval(sinkDir.getName().replace("_", "/")); + final Interval sinkInterval = Intervals.of(sinkDir.getName().replace("_", "/")); //final File[] sinkFiles = sinkDir.listFiles(); // To avoid reading and listing of "merged" 
dir @@ -739,12 +742,12 @@ public class RealtimePlumber implements Plumber final Granularity segmentGranularity = schema.getGranularitySpec().getSegmentGranularity(); final Period windowPeriod = config.getWindowPeriod(); - final DateTime truncatedNow = segmentGranularity.bucketStart(new DateTime()); + final DateTime truncatedNow = segmentGranularity.bucketStart(DateTimes.nowUtc()); final long windowMillis = windowPeriod.toStandardDuration().getMillis(); log.info( "Expect to run at [%s]", - new DateTime().plus( + DateTimes.nowUtc().plus( new Duration( System.currentTimeMillis(), segmentGranularity.increment(truncatedNow).getMillis() + windowMillis @@ -797,14 +800,7 @@ public class RealtimePlumber implements Plumber final long windowMillis = windowPeriod.toStandardDuration().getMillis(); log.info("Starting merge and push."); DateTime minTimestampAsDate = segmentGranularity.bucketStart( - new DateTime( - Math.max( - windowMillis, - rejectionPolicy.getCurrMaxTime() - .getMillis() - ) - - windowMillis - ) + DateTimes.utc(Math.max(windowMillis, rejectionPolicy.getCurrMaxTime().getMillis()) - windowMillis) ); long minTimestamp = minTimestampAsDate.getMillis(); @@ -824,7 +820,7 @@ public class RealtimePlumber implements Plumber log.info( "Skipping persist and merge for entry [%s] : Start time [%s] >= [%s] min timestamp required in this run. Segment will be picked up in a future run.", entry, - new DateTime(intervalStart), + DateTimes.utc(intervalStart), minTimestampAsDate ); } diff --git a/server/src/main/java/io/druid/segment/realtime/plumber/ServerTimeRejectionPolicyFactory.java b/server/src/main/java/io/druid/segment/realtime/plumber/ServerTimeRejectionPolicyFactory.java index a52639b02c3..878b1164b94 100644 --- a/server/src/main/java/io/druid/segment/realtime/plumber/ServerTimeRejectionPolicyFactory.java +++ b/server/src/main/java/io/druid/segment/realtime/plumber/ServerTimeRejectionPolicyFactory.java @@ -19,6 +19,7 @@ package io.druid.segment.realtime.plumber; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.StringUtils; import org.joda.time.DateTime; import org.joda.time.Period; @@ -35,7 +36,7 @@ public class ServerTimeRejectionPolicyFactory implements RejectionPolicyFactory @Override public DateTime getCurrMaxTime() { - return new DateTime(); + return DateTimes.nowUtc(); } @Override diff --git a/server/src/main/java/io/druid/server/AsyncQueryForwardingServlet.java b/server/src/main/java/io/druid/server/AsyncQueryForwardingServlet.java index 3af0b2e2d86..84f4485dcf2 100644 --- a/server/src/main/java/io/druid/server/AsyncQueryForwardingServlet.java +++ b/server/src/main/java/io/druid/server/AsyncQueryForwardingServlet.java @@ -31,6 +31,7 @@ import com.metamx.emitter.service.ServiceEmitter; import io.druid.guice.annotations.Json; import io.druid.guice.annotations.Smile; import io.druid.guice.http.DruidHttpClientConfig; +import io.druid.java.util.common.DateTimes; import io.druid.query.DruidMetrics; import io.druid.query.GenericQueryMetricsFactory; import io.druid.query.Query; @@ -47,7 +48,6 @@ import org.eclipse.jetty.client.api.Result; import org.eclipse.jetty.client.util.BytesContentProvider; import org.eclipse.jetty.http.HttpMethod; import org.eclipse.jetty.proxy.AsyncProxyServlet; -import org.joda.time.DateTime; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; @@ -221,7 +221,7 @@ public class AsyncQueryForwardingServlet extends AsyncProxyServlet implements Qu final String errorMessage = e.getMessage() == null ? 
"no error message" : e.getMessage(); requestLogger.log( new RequestLogLine( - new DateTime(), + DateTimes.nowUtc(), request.getRemoteAddr(), null, new QueryStats(ImmutableMap.of("success", false, "exception", errorMessage)) @@ -394,7 +394,7 @@ public class AsyncQueryForwardingServlet extends AsyncProxyServlet implements Qu emitQueryTime(requestTimeNs, success); requestLogger.log( new RequestLogLine( - new DateTime(), + DateTimes.nowUtc(), req.getRemoteAddr(), query, new QueryStats( @@ -427,7 +427,7 @@ public class AsyncQueryForwardingServlet extends AsyncProxyServlet implements Qu emitQueryTime(System.nanoTime() - startNs, false); requestLogger.log( new RequestLogLine( - new DateTime(), + DateTimes.nowUtc(), req.getRemoteAddr(), query, new QueryStats( diff --git a/server/src/main/java/io/druid/server/ClientInfoResource.java b/server/src/main/java/io/druid/server/ClientInfoResource.java index b32091960c9..32e678ea86a 100644 --- a/server/src/main/java/io/druid/server/ClientInfoResource.java +++ b/server/src/main/java/io/druid/server/ClientInfoResource.java @@ -34,7 +34,9 @@ import io.druid.client.FilteredServerInventoryView; import io.druid.client.ServerViewUtil; import io.druid.client.TimelineServerView; import io.druid.client.selector.ServerSelector; -import io.druid.common.utils.JodaUtils; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; +import io.druid.java.util.common.JodaUtils; import io.druid.java.util.common.Pair; import io.druid.java.util.common.logger.Logger; import io.druid.query.LocatedSegmentDescriptor; @@ -172,7 +174,7 @@ public class ClientInfoResource DateTime now = getCurrentTime(); theInterval = new Interval(segmentMetadataQueryConfig.getDefaultHistory(), now); } else { - theInterval = new Interval(interval); + theInterval = Intervals.of(interval); } TimelineLookup timeline = timelineServerView.getTimeline(new TableDataSource(dataSourceName)); @@ -259,7 +261,7 @@ public class ClientInfoResource DateTime now = getCurrentTime(); theInterval = new Interval(segmentMetadataQueryConfig.getDefaultHistory(), now); } else { - theInterval = new Interval(interval); + theInterval = Intervals.of(interval); } for (DataSegment segment : segments) { @@ -292,7 +294,7 @@ public class ClientInfoResource DateTime now = getCurrentTime(); theInterval = new Interval(segmentMetadataQueryConfig.getDefaultHistory(), now); } else { - theInterval = new Interval(interval); + theInterval = Intervals.of(interval); } for (DataSegment segment : segments) { @@ -317,7 +319,7 @@ public class ClientInfoResource { List intervalList = Lists.newArrayList(); for (String interval : intervals.split(",")) { - intervalList.add(Interval.parse(interval.trim())); + intervalList.add(Intervals.of(interval.trim())); } List condensed = JodaUtils.condenseIntervals(intervalList); return ServerViewUtil.getTargetLocations(timelineServerView, datasource, condensed, numCandidates); @@ -325,7 +327,7 @@ public class ClientInfoResource protected DateTime getCurrentTime() { - return new DateTime(); + return DateTimes.nowUtc(); } diff --git a/server/src/main/java/io/druid/server/QueryLifecycle.java b/server/src/main/java/io/druid/server/QueryLifecycle.java index 4104e1638e2..193daa19a95 100644 --- a/server/src/main/java/io/druid/server/QueryLifecycle.java +++ b/server/src/main/java/io/druid/server/QueryLifecycle.java @@ -22,6 +22,7 @@ package io.druid.server; import com.google.common.base.Strings; import com.metamx.emitter.service.ServiceEmitter; import io.druid.client.DirectDruidClient; 
+import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.ISE; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.SequenceWrapper; @@ -44,7 +45,6 @@ import io.druid.server.security.AuthConfig; import io.druid.server.security.AuthorizationInfo; import io.druid.server.security.Resource; import io.druid.server.security.ResourceType; -import org.joda.time.DateTime; import javax.annotation.Nullable; import java.util.LinkedHashMap; @@ -303,7 +303,7 @@ public class QueryLifecycle requestLogger.log( new RequestLogLine( - new DateTime(startMs), + DateTimes.utc(startMs), Strings.nullToEmpty(remoteAddress), queryPlus.getQuery(), new QueryStats(statsMap) diff --git a/server/src/main/java/io/druid/server/audit/SQLAuditManager.java b/server/src/main/java/io/druid/server/audit/SQLAuditManager.java index e1de8612e43..da221868960 100644 --- a/server/src/main/java/io/druid/server/audit/SQLAuditManager.java +++ b/server/src/main/java/io/druid/server/audit/SQLAuditManager.java @@ -29,6 +29,7 @@ import io.druid.audit.AuditEntry; import io.druid.audit.AuditManager; import io.druid.guice.ManageLifecycle; import io.druid.guice.annotations.Json; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.StringUtils; import io.druid.metadata.MetadataStorageTablesConfig; import io.druid.metadata.SQLMetadataConnector; @@ -165,7 +166,7 @@ public class SQLAuditManager implements AuditManager { final Interval theInterval; if (interval == null) { - DateTime now = new DateTime(); + DateTime now = DateTimes.nowUtc(); theInterval = new Interval(now.minus(config.getAuditHistoryMillis()), now); } else { theInterval = interval; diff --git a/server/src/main/java/io/druid/server/coordination/BatchDataSegmentAnnouncer.java b/server/src/main/java/io/druid/server/coordination/BatchDataSegmentAnnouncer.java index e8c3ee051d6..e5df3079760 100644 --- a/server/src/main/java/io/druid/server/coordination/BatchDataSegmentAnnouncer.java +++ b/server/src/main/java/io/druid/server/coordination/BatchDataSegmentAnnouncer.java @@ -32,6 +32,7 @@ import com.google.common.util.concurrent.SettableFuture; import com.google.inject.Inject; import io.druid.common.utils.UUIDUtils; import io.druid.curator.announcement.Announcer; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.ISE; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.logger.Logger; @@ -39,7 +40,6 @@ import io.druid.server.initialization.BatchDataSegmentAnnouncerConfig; import io.druid.server.initialization.ZkPathsConfig; import io.druid.timeline.DataSegment; import org.apache.curator.utils.ZKPaths; -import org.joda.time.DateTime; import javax.annotation.Nullable; import java.io.IOException; @@ -313,7 +313,7 @@ public class BatchDataSegmentAnnouncer implements DataSegmentAnnouncer server.getHost(), server.getType().toString(), server.getTier(), - new DateTime().toString() + DateTimes.nowUtc().toString() ) ); } diff --git a/server/src/main/java/io/druid/server/coordinator/DruidCoordinator.java b/server/src/main/java/io/druid/server/coordinator/DruidCoordinator.java index 9e963062606..59200171ed7 100644 --- a/server/src/main/java/io/druid/server/coordinator/DruidCoordinator.java +++ b/server/src/main/java/io/druid/server/coordinator/DruidCoordinator.java @@ -45,6 +45,7 @@ import io.druid.curator.discovery.ServiceAnnouncer; import io.druid.guice.ManageLifecycle; import io.druid.guice.annotations.CoordinatorIndexingServiceHelper; import 
io.druid.guice.annotations.Self; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.IAE; import io.druid.java.util.common.Pair; import io.druid.java.util.common.concurrent.ScheduledExecutorFactory; @@ -238,7 +239,7 @@ public class DruidCoordinator return retVal; } - final DateTime now = new DateTime(); + final DateTime now = DateTimes.nowUtc(); for (final DataSegment segment : getAvailableDataSegments()) { final List rules = metadataRuleManager.getRulesWithDefault(segment.getDataSource()); @@ -828,7 +829,7 @@ public class DruidCoordinator .withDatabaseRuleManager(metadataRuleManager) .withLoadManagementPeons(loadManagementPeons) .withSegmentReplicantLookup(segmentReplicantLookup) - .withBalancerReferenceTimestamp(DateTime.now()) + .withBalancerReferenceTimestamp(DateTimes.nowUtc()) .build(); } }, diff --git a/server/src/main/java/io/druid/server/coordinator/DruidCoordinatorRuntimeParams.java b/server/src/main/java/io/druid/server/coordinator/DruidCoordinatorRuntimeParams.java index 54afd9d7148..183cb781e8a 100644 --- a/server/src/main/java/io/druid/server/coordinator/DruidCoordinatorRuntimeParams.java +++ b/server/src/main/java/io/druid/server/coordinator/DruidCoordinatorRuntimeParams.java @@ -23,6 +23,7 @@ import com.google.common.collect.Maps; import com.google.common.collect.Sets; import com.metamx.emitter.service.ServiceEmitter; import io.druid.client.DruidDataSource; +import io.druid.java.util.common.DateTimes; import io.druid.metadata.MetadataRuleManager; import io.druid.timeline.DataSegment; import org.joda.time.DateTime; @@ -223,7 +224,7 @@ public class DruidCoordinatorRuntimeParams this.emitter = null; this.stats = new CoordinatorStats(); this.coordinatorDynamicConfig = new CoordinatorDynamicConfig.Builder().build(); - this.balancerReferenceTimestamp = DateTime.now(); + this.balancerReferenceTimestamp = DateTimes.nowUtc(); } Builder( diff --git a/server/src/main/java/io/druid/server/coordinator/helper/DruidCoordinatorRuleRunner.java b/server/src/main/java/io/druid/server/coordinator/helper/DruidCoordinatorRuleRunner.java index c4a93feff6c..7924e45adbb 100644 --- a/server/src/main/java/io/druid/server/coordinator/helper/DruidCoordinatorRuleRunner.java +++ b/server/src/main/java/io/druid/server/coordinator/helper/DruidCoordinatorRuleRunner.java @@ -22,6 +22,7 @@ package io.druid.server.coordinator.helper; import com.google.common.collect.Lists; import com.metamx.common.guava.Comparators; import com.metamx.emitter.EmittingLogger; +import io.druid.java.util.common.DateTimes; import io.druid.metadata.MetadataRuleManager; import io.druid.server.coordinator.CoordinatorStats; import io.druid.server.coordinator.DruidCluster; @@ -126,7 +127,7 @@ public class DruidCoordinatorRuleRunner implements DruidCoordinatorHelper .build(); // Run through all matched rules for available segments - DateTime now = new DateTime(); + DateTime now = DateTimes.nowUtc(); MetadataRuleManager databaseRuleManager = paramsWithReplicationManager.getDatabaseRuleManager(); final List segmentsWithMissingRules = Lists.newArrayListWithCapacity(MAX_MISSING_RULES); diff --git a/server/src/main/java/io/druid/server/coordinator/helper/DruidCoordinatorSegmentKiller.java b/server/src/main/java/io/druid/server/coordinator/helper/DruidCoordinatorSegmentKiller.java index ccdcf36b7f5..a2b05a60732 100644 --- a/server/src/main/java/io/druid/server/coordinator/helper/DruidCoordinatorSegmentKiller.java +++ b/server/src/main/java/io/druid/server/coordinator/helper/DruidCoordinatorSegmentKiller.java @@ -24,7 
+24,8 @@ import com.google.common.base.Preconditions; import com.google.inject.Inject; import io.druid.client.indexing.IndexingServiceClient; -import io.druid.common.utils.JodaUtils; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.JodaUtils; import io.druid.java.util.common.logger.Logger; import io.druid.metadata.MetadataSegmentManager; import io.druid.server.coordinator.DruidCoordinatorConfig; @@ -121,11 +122,7 @@ public class DruidCoordinatorSegmentKiller implements DruidCoordinatorHelper { List unusedSegmentIntervals = segmentManager.getUnusedSegmentIntervals( dataSource, - new Interval( - 0, - System.currentTimeMillis() - - retainDuration - ), + new Interval(DateTimes.EPOCH, DateTimes.nowUtc().minus(retainDuration)), limit ); diff --git a/server/src/main/java/io/druid/server/coordinator/helper/DruidCoordinatorSegmentMerger.java b/server/src/main/java/io/druid/server/coordinator/helper/DruidCoordinatorSegmentMerger.java index 275758f4b2f..708e73287a6 100644 --- a/server/src/main/java/io/druid/server/coordinator/helper/DruidCoordinatorSegmentMerger.java +++ b/server/src/main/java/io/druid/server/coordinator/helper/DruidCoordinatorSegmentMerger.java @@ -32,6 +32,7 @@ import com.google.inject.Inject; import com.metamx.emitter.service.ServiceMetricEvent; import io.druid.client.indexing.IndexingServiceClient; import io.druid.common.config.JacksonConfigManager; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.ISE; import io.druid.java.util.common.Pair; import io.druid.java.util.common.guava.FunctionalIterable; @@ -99,7 +100,7 @@ public class DruidCoordinatorSegmentMerger implements DruidCoordinatorHelper // Get serviced segments from the timeline VersionedIntervalTimeline timeline = entry.getValue(); List> timelineObjects = - timeline.lookup(new Interval(new DateTime(0), new DateTime("3000-01-01"))); + timeline.lookup(new Interval(DateTimes.EPOCH, DateTimes.of("3000-01-01"))); // Accumulate timelineObjects greedily until we reach our limits, then backtrack to the maximum complete set SegmentsToMerge segmentsToMerge = new SegmentsToMerge(); diff --git a/server/src/main/java/io/druid/server/http/ClusterResource.java b/server/src/main/java/io/druid/server/http/ClusterResource.java index 4092ed34a38..1de5eb55531 100644 --- a/server/src/main/java/io/druid/server/http/ClusterResource.java +++ b/server/src/main/java/io/druid/server/http/ClusterResource.java @@ -25,6 +25,7 @@ import com.sun.jersey.spi.container.ResourceFilters; import io.druid.discovery.DiscoveryDruidNode; import io.druid.discovery.DruidNodeDiscoveryProvider; import io.druid.guice.LazySingleton; +import io.druid.java.util.common.StringUtils; import io.druid.server.http.security.StateResourceFilter; import javax.ws.rs.GET; @@ -98,7 +99,7 @@ public class ClusterResource if (nodeType == null || !DruidNodeDiscoveryProvider.ALL_NODE_TYPES.contains(nodeType)) { return Response.serverError() .status(Response.Status.BAD_REQUEST) - .entity(String.format( + .entity(StringUtils.format( "Invalid nodeType [%s]. 
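[Illustrative note, not part of the patch] The DruidCoordinatorSegmentKiller hunk above switches from raw millisecond arithmetic to an interval whose endpoints are explicit UTC instants. The sketch below shows that computation in plain Joda-Time; the class and method names are assumptions made for the example.

import org.joda.time.DateTime;
import org.joda.time.Duration;
import org.joda.time.Interval;
import org.joda.time.chrono.ISOChronology;

// Illustrative sketch: build the "unused segments older than retainDuration"
// window from explicit UTC instants rather than raw millis subtraction.
public class RetentionWindowSketch
{
  private static final ISOChronology UTC = ISOChronology.getInstanceUTC();

  public static Interval unusedSegmentWindow(Duration retainDuration)
  {
    DateTime epoch = new DateTime(0L, UTC);                     // stands in for DateTimes.EPOCH
    DateTime cutoff = DateTime.now(UTC).minus(retainDuration);  // nowUtc().minus(retainDuration)
    return new Interval(epoch, cutoff);
  }

  public static void main(String[] args)
  {
    // e.g. everything unused that is older than 90 days becomes eligible for kill
    System.out.println(unusedSegmentWindow(Duration.standardDays(90)));
  }
}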
Valid node types are %s .", nodeType, DruidNodeDiscoveryProvider.ALL_NODE_TYPES diff --git a/server/src/main/java/io/druid/server/http/CoordinatorDynamicConfigsResource.java b/server/src/main/java/io/druid/server/http/CoordinatorDynamicConfigsResource.java index 2f6e9973f53..208dcf387be 100644 --- a/server/src/main/java/io/druid/server/http/CoordinatorDynamicConfigsResource.java +++ b/server/src/main/java/io/druid/server/http/CoordinatorDynamicConfigsResource.java @@ -24,6 +24,7 @@ import com.sun.jersey.spi.container.ResourceFilters; import io.druid.audit.AuditInfo; import io.druid.audit.AuditManager; import io.druid.common.config.JacksonConfigManager; +import io.druid.java.util.common.Intervals; import io.druid.server.coordinator.CoordinatorDynamicConfig; import io.druid.server.http.security.ConfigResourceFilter; import org.joda.time.Interval; @@ -105,7 +106,7 @@ public class CoordinatorDynamicConfigsResource @QueryParam("count") final Integer count ) { - Interval theInterval = interval == null ? null : new Interval(interval); + Interval theInterval = interval == null ? null : Intervals.of(interval); if (theInterval == null && count != null) { try { return Response.ok( diff --git a/server/src/main/java/io/druid/server/http/DatasourcesResource.java b/server/src/main/java/io/druid/server/http/DatasourcesResource.java index df2d725d7e9..27f6b5a239d 100644 --- a/server/src/main/java/io/druid/server/http/DatasourcesResource.java +++ b/server/src/main/java/io/druid/server/http/DatasourcesResource.java @@ -33,6 +33,8 @@ import io.druid.client.DruidServer; import io.druid.client.ImmutableSegmentLoadInfo; import io.druid.client.SegmentLoadInfo; import io.druid.client.indexing.IndexingServiceClient; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.MapUtils; import io.druid.java.util.common.Pair; import io.druid.java.util.common.guava.Comparators; @@ -208,7 +210,7 @@ public class DatasourcesResource if (kill != null && Boolean.valueOf(kill)) { try { - indexingServiceClient.killSegments(dataSourceName, new Interval(interval)); + indexingServiceClient.killSegments(dataSourceName, Intervals.of(interval)); } catch (IllegalArgumentException e) { return Response.status(Response.Status.BAD_REQUEST) @@ -253,9 +255,9 @@ public class DatasourcesResource if (indexingServiceClient == null) { return Response.ok(ImmutableMap.of("error", "no indexing service found")).build(); } - final Interval theInterval = new Interval(interval.replace("_", "/")); + final Interval theInterval = Intervals.of(interval.replace("_", "/")); try { - indexingServiceClient.killSegments(dataSourceName, new Interval(theInterval)); + indexingServiceClient.killSegments(dataSourceName, theInterval); } catch (Exception e) { return Response.serverError() @@ -343,7 +345,7 @@ public class DatasourcesResource ) { final DruidDataSource dataSource = getDataSource(dataSourceName); - final Interval theInterval = new Interval(interval.replace("_", "/")); + final Interval theInterval = Intervals.of(interval.replace("_", "/")); if (dataSource == null) { return Response.noContent().build(); @@ -589,8 +591,8 @@ public class DatasourcesResource Map> tierDistinctSegments = Maps.newHashMap(); long totalSegmentSize = 0; - long minTime = Long.MAX_VALUE; - long maxTime = Long.MIN_VALUE; + DateTime minTime = DateTimes.MAX; + DateTime maxTime = DateTimes.MIN; String tier; for (DruidServer druidServer : serverInventoryView.getInventory()) { DruidDataSource druidDataSource = 
druidServer.getDataSource(dataSourceName); @@ -616,12 +618,8 @@ public class DatasourcesResource totalSegmentSize += dataSegment.getSize(); totalDistinctSegments.add(dataSegment.getIdentifier()); - if (dataSegment.getInterval().getStartMillis() < minTime) { - minTime = dataSegment.getInterval().getStartMillis(); - } - if (dataSegment.getInterval().getEndMillis() > maxTime) { - maxTime = dataSegment.getInterval().getEndMillis(); - } + minTime = DateTimes.min(minTime, dataSegment.getInterval().getStart()); + maxTime = DateTimes.max(maxTime, dataSegment.getInterval().getEnd()); } } @@ -639,8 +637,8 @@ public class DatasourcesResource segments.put("count", totalDistinctSegments.size()); segments.put("size", totalSegmentSize); - segments.put("minTime", new DateTime(minTime)); - segments.put("maxTime", new DateTime(maxTime)); + segments.put("minTime", minTime); + segments.put("maxTime", maxTime); return retVal; } @@ -661,7 +659,7 @@ public class DatasourcesResource TimelineLookup timeline = serverInventoryView.getTimeline( new TableDataSource(dataSourceName) ); - final Interval theInterval = new Interval(interval.replace("_", "/")); + final Interval theInterval = Intervals.of(interval.replace("_", "/")); if (timeline == null) { log.debug("No timeline found for datasource[%s]", dataSourceName); return Response.ok(Lists.newArrayList()).build(); diff --git a/server/src/main/java/io/druid/server/http/HostAndPortWithScheme.java b/server/src/main/java/io/druid/server/http/HostAndPortWithScheme.java index c3552e10d91..81636dbe261 100644 --- a/server/src/main/java/io/druid/server/http/HostAndPortWithScheme.java +++ b/server/src/main/java/io/druid/server/http/HostAndPortWithScheme.java @@ -21,6 +21,7 @@ package io.druid.server.http; import com.google.common.base.Preconditions; import com.google.common.net.HostAndPort; +import io.druid.java.util.common.StringUtils; public class HostAndPortWithScheme { @@ -56,8 +57,9 @@ public class HostAndPortWithScheme private static String checkAndGetScheme(String scheme) { - Preconditions.checkState(scheme.toLowerCase().equals("http") || scheme.toLowerCase().equals("https")); - return scheme.toLowerCase(); + String schemeLowerCase = StringUtils.toLowerCase(scheme); + Preconditions.checkState(schemeLowerCase.equals("http") || schemeLowerCase.equals("https")); + return schemeLowerCase; } public String getScheme() @@ -88,7 +90,7 @@ public class HostAndPortWithScheme @Override public String toString() { - return String.format("%s:%s", scheme, hostAndPort.toString()); + return StringUtils.format("%s:%s", scheme, hostAndPort.toString()); } @Override diff --git a/server/src/main/java/io/druid/server/http/IntervalsResource.java b/server/src/main/java/io/druid/server/http/IntervalsResource.java index 630531252c2..66478a116e5 100644 --- a/server/src/main/java/io/druid/server/http/IntervalsResource.java +++ b/server/src/main/java/io/druid/server/http/IntervalsResource.java @@ -21,9 +21,9 @@ package io.druid.server.http; import com.google.common.collect.Maps; import com.google.inject.Inject; - import io.druid.client.DruidDataSource; import io.druid.client.InventoryView; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.MapUtils; import io.druid.java.util.common.guava.Comparators; import io.druid.server.security.AuthConfig; @@ -99,7 +99,7 @@ public class IntervalsResource @Context final HttpServletRequest req ) { - final Interval theInterval = new Interval(interval.replace("_", "/")); + final Interval theInterval = Intervals.of(interval.replace("_", 
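[Illustrative note, not part of the patch] The DatasourcesResource hunk above stops tracking min/max bounds as raw longs (which were later passed to the zone-sensitive DateTime(long) constructor) and accumulates DateTime endpoints directly. The sketch below shows the same accumulation in plain Joda-Time; the patch seeds with DateTimes.MAX / DateTimes.MIN sentinels, while the sketch uses null seeding to stay self-contained, and its class and variable names are assumptions.

import java.util.Arrays;
import java.util.List;
import org.joda.time.DateTime;
import org.joda.time.Interval;
import org.joda.time.chrono.ISOChronology;

// Illustrative sketch of tracking segment time bounds as UTC DateTimes.
public class SegmentTimeBoundsSketch
{
  private static final ISOChronology UTC = ISOChronology.getInstanceUTC();

  public static void main(String[] args)
  {
    List<Interval> segmentIntervals = Arrays.asList(
        new Interval(new DateTime("2011-01-05", UTC), new DateTime("2011-01-06", UTC)),
        new Interval(new DateTime("2011-01-09", UTC), new DateTime("2011-01-10", UTC))
    );

    DateTime minTime = null; // the patch seeds with DateTimes.MAX instead
    DateTime maxTime = null; // the patch seeds with DateTimes.MIN instead

    for (Interval interval : segmentIntervals) {
      if (minTime == null || interval.getStart().isBefore(minTime)) {
        minTime = interval.getStart();
      }
      if (maxTime == null || interval.getEnd().isAfter(maxTime)) {
        maxTime = interval.getEnd();
      }
    }

    // Both bounds now render with an explicit Z offset, independent of the host zone.
    System.out.println("minTime=" + minTime + ", maxTime=" + maxTime);
  }
}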
"/")); final Set datasources = authConfig.isEnabled() ? InventoryViewUtils.getSecuredDataSources( serverInventoryView, diff --git a/server/src/main/java/io/druid/server/http/RulesResource.java b/server/src/main/java/io/druid/server/http/RulesResource.java index e7eabccb577..d56333470a8 100644 --- a/server/src/main/java/io/druid/server/http/RulesResource.java +++ b/server/src/main/java/io/druid/server/http/RulesResource.java @@ -25,6 +25,7 @@ import com.sun.jersey.spi.container.ResourceFilters; import io.druid.audit.AuditEntry; import io.druid.audit.AuditInfo; import io.druid.audit.AuditManager; +import io.druid.java.util.common.Intervals; import io.druid.metadata.MetadataRuleManager; import io.druid.server.coordinator.rules.Rule; import io.druid.server.http.security.RulesResourceFilter; @@ -164,7 +165,7 @@ public class RulesResource return auditManager.fetchAuditHistory("rules", count); } - Interval theInterval = interval == null ? null : new Interval(interval); + Interval theInterval = interval == null ? null : Intervals.of(interval); if (dataSourceName != null) { return auditManager.fetchAuditHistory(dataSourceName, "rules", theInterval); } diff --git a/server/src/main/java/io/druid/server/listener/announcer/ListenerResourceAnnouncer.java b/server/src/main/java/io/druid/server/listener/announcer/ListenerResourceAnnouncer.java index f87c825d8c9..809e4e1827c 100644 --- a/server/src/main/java/io/druid/server/listener/announcer/ListenerResourceAnnouncer.java +++ b/server/src/main/java/io/druid/server/listener/announcer/ListenerResourceAnnouncer.java @@ -27,7 +27,6 @@ import io.druid.java.util.common.lifecycle.LifecycleStop; import io.druid.java.util.common.logger.Logger; import io.druid.server.http.HostAndPortWithScheme; import org.apache.curator.utils.ZKPaths; -import org.joda.time.DateTime; import java.nio.ByteBuffer; @@ -38,7 +37,7 @@ public abstract class ListenerResourceAnnouncer { private static final byte[] ANNOUNCE_BYTES = ByteBuffer .allocate(Longs.BYTES) - .putLong(DateTime.now().getMillis()) + .putLong(System.currentTimeMillis()) .array(); private static final Logger LOG = new Logger(ListenerResourceAnnouncer.class); private final Object startStopSync = new Object(); diff --git a/server/src/main/java/io/druid/server/log/FileRequestLogger.java b/server/src/main/java/io/druid/server/log/FileRequestLogger.java index b0beed59445..9f29f33e236 100644 --- a/server/src/main/java/io/druid/server/log/FileRequestLogger.java +++ b/server/src/main/java/io/druid/server/log/FileRequestLogger.java @@ -22,6 +22,7 @@ package io.druid.server.log; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Charsets; import com.google.common.base.Throwables; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.concurrent.ScheduledExecutors; import io.druid.java.util.common.guava.CloseQuietly; @@ -31,6 +32,7 @@ import io.druid.server.RequestLogLine; import org.joda.time.DateTime; import org.joda.time.Duration; import org.joda.time.MutableDateTime; +import org.joda.time.chrono.ISOChronology; import java.io.File; import java.io.FileNotFoundException; @@ -66,15 +68,15 @@ public class FileRequestLogger implements RequestLogger try { baseDir.mkdirs(); - MutableDateTime mutableDateTime = new DateTime().toMutableDateTime(); + MutableDateTime mutableDateTime = DateTimes.nowUtc().toMutableDateTime(ISOChronology.getInstanceUTC()); mutableDateTime.setMillisOfDay(0); synchronized (lock) { - currentDay = 
mutableDateTime.toDateTime(); + currentDay = mutableDateTime.toDateTime(ISOChronology.getInstanceUTC()); fileWriter = getFileWriter(); } long nextDay = currentDay.plusDays(1).getMillis(); - Duration initialDelay = new Duration(nextDay - new DateTime().getMillis()); + Duration initialDelay = new Duration(nextDay - System.currentTimeMillis()); ScheduledExecutors.scheduleWithFixedDelay( exec, diff --git a/server/src/main/java/io/druid/server/router/TieredBrokerHostSelector.java b/server/src/main/java/io/druid/server/router/TieredBrokerHostSelector.java index 72f0cd8c9e5..222bf2a60e4 100644 --- a/server/src/main/java/io/druid/server/router/TieredBrokerHostSelector.java +++ b/server/src/main/java/io/druid/server/router/TieredBrokerHostSelector.java @@ -27,6 +27,7 @@ import com.metamx.emitter.EmittingLogger; import io.druid.client.selector.HostSelector; import io.druid.curator.discovery.ServerDiscoveryFactory; import io.druid.curator.discovery.ServerDiscoverySelector; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.Pair; import io.druid.java.util.common.lifecycle.LifecycleStart; import io.druid.java.util.common.lifecycle.LifecycleStop; @@ -146,7 +147,7 @@ public class TieredBrokerHostSelector implements HostSelector List rules = ruleManager.getRulesWithDefault(Iterables.getFirst(query.getDataSource().getNames(), null)); // find the rule that can apply to the entire set of intervals - DateTime now = new DateTime(); + DateTime now = DateTimes.nowUtc(); int lastRulePosition = -1; LoadRule baseRule = null; diff --git a/server/src/test/java/io/druid/client/BrokerServerViewTest.java b/server/src/test/java/io/druid/client/BrokerServerViewTest.java index b16c6684f45..2dae5d4310a 100644 --- a/server/src/test/java/io/druid/client/BrokerServerViewTest.java +++ b/server/src/test/java/io/druid/client/BrokerServerViewTest.java @@ -34,6 +34,7 @@ import io.druid.client.selector.RandomServerSelectorStrategy; import io.druid.client.selector.ServerSelector; import io.druid.curator.CuratorTestBase; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.Pair; import io.druid.query.QueryToolChestWarehouse; import io.druid.query.QueryWatcher; @@ -114,14 +115,14 @@ public class BrokerServerViewTest extends CuratorTestBase TimelineLookup timeline = brokerServerView.getTimeline(new TableDataSource("test_broker_server_view")); List serverLookupRes = (List) timeline.lookup( - new Interval( + Intervals.of( "2014-10-20T00:00:00Z/P1D" ) ); Assert.assertEquals(1, serverLookupRes.size()); TimelineObjectHolder actualTimelineObjectHolder = serverLookupRes.get(0); - Assert.assertEquals(new Interval("2014-10-20T00:00:00Z/P1D"), actualTimelineObjectHolder.getInterval()); + Assert.assertEquals(Intervals.of("2014-10-20T00:00:00Z/P1D"), actualTimelineObjectHolder.getInterval()); Assert.assertEquals("v1", actualTimelineObjectHolder.getVersion()); PartitionHolder actualPartitionHolder = actualTimelineObjectHolder.getObject(); @@ -139,9 +140,9 @@ public class BrokerServerViewTest extends CuratorTestBase Assert.assertEquals( 0, - ((List) timeline.lookup(new Interval("2014-10-20T00:00:00Z/P1D"))).size() + ((List) timeline.lookup(Intervals.of("2014-10-20T00:00:00Z/P1D"))).size() ); - Assert.assertNull(timeline.findEntry(new Interval("2014-10-20T00:00:00Z/P1D"), "v1")); + Assert.assertNull(timeline.findEntry(Intervals.of("2014-10-20T00:00:00Z/P1D"), "v1")); } @Test @@ -210,7 +211,7 @@ public class BrokerServerViewTest extends CuratorTestBase 
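[Illustrative note, not part of the patch] The test hunks that follow replace new Interval("...") and new DateTime("...") fixtures with Intervals.of(...) and DateTimes.of(...). A minimal sketch of the difference, assuming those helpers parse ISO-8601 strings against the UTC ISO chronology (the helper classes themselves are not part of this section, and the class name below is hypothetical):

import org.joda.time.DateTime;
import org.joda.time.Interval;
import org.joda.time.chrono.ISOChronology;

// Illustrative sketch: parse ISO-8601 strings against a fixed UTC chronology so
// test fixtures evaluate identically on every machine, whatever its default zone.
public class UtcParsingSketch
{
  private static final ISOChronology UTC = ISOChronology.getInstanceUTC();

  public static void main(String[] args)
  {
    // Zone-sensitive: new DateTime("2011-01-05") shifts with the JVM default zone.
    // Zone-fixed equivalents, as assumed for DateTimes.of / Intervals.of:
    DateTime day = new DateTime("2011-01-05", UTC);
    Interval window = new Interval(new DateTime("2011-01-05", UTC), new DateTime("2011-01-10", UTC));

    System.out.println(day);    // 2011-01-05T00:00:00.000Z on every machine
    System.out.println(window); // 2011-01-05T00:00:00.000Z/2011-01-10T00:00:00.000Z
  }
}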
createExpected("2011-04-06/2011-04-09", "v3", druidServers.get(3), segments.get(3)) ), (List) timeline.lookup( - new Interval( + Intervals.of( "2011-04-01/2011-04-09" ) ) @@ -232,7 +233,7 @@ public class BrokerServerViewTest extends CuratorTestBase createExpected("2011-04-06/2011-04-09", "v3", druidServers.get(3), segments.get(3)) ), (List) timeline.lookup( - new Interval( + Intervals.of( "2011-04-01/2011-04-09" ) ) @@ -249,7 +250,7 @@ public class BrokerServerViewTest extends CuratorTestBase Assert.assertEquals( 0, - ((List) timeline.lookup(new Interval("2011-04-01/2011-04-09"))).size() + ((List) timeline.lookup(Intervals.of("2011-04-01/2011-04-09"))).size() ); } @@ -260,7 +261,7 @@ public class BrokerServerViewTest extends CuratorTestBase DataSegment segment ) { - return Pair.of(new Interval(intervalStr), Pair.of(version, Pair.of(druidServer, segment))); + return Pair.of(Intervals.of(intervalStr), Pair.of(version, Pair.of(druidServer, segment))); } private void assertValues( @@ -349,7 +350,7 @@ public class BrokerServerViewTest extends CuratorTestBase { return DataSegment.builder() .dataSource("test_broker_server_view") - .interval(new Interval(intervalStr)) + .interval(Intervals.of(intervalStr)) .loadSpec( ImmutableMap.of( "type", diff --git a/server/src/test/java/io/druid/client/CachingClusteredClientFunctionalityTest.java b/server/src/test/java/io/druid/client/CachingClusteredClientFunctionalityTest.java index e6814f166ed..936e538a811 100644 --- a/server/src/test/java/io/druid/client/CachingClusteredClientFunctionalityTest.java +++ b/server/src/test/java/io/druid/client/CachingClusteredClientFunctionalityTest.java @@ -29,6 +29,7 @@ import io.druid.client.cache.MapCache; import io.druid.client.selector.QueryableDruidServer; import io.druid.client.selector.ServerSelector; import io.druid.client.selector.TierSelectorStrategy; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.guava.Sequence; import io.druid.query.DataSource; import io.druid.query.Druids; @@ -81,9 +82,9 @@ public class CachingClusteredClientFunctionalityTest @Test public void testUncoveredInterval() throws Exception { - addToTimeline(new Interval("2015-01-02/2015-01-03"), "1"); - addToTimeline(new Interval("2015-01-04/2015-01-05"), "1"); - addToTimeline(new Interval("2015-02-04/2015-02-05"), "1"); + addToTimeline(Intervals.of("2015-01-02/2015-01-03"), "1"); + addToTimeline(Intervals.of("2015-01-04/2015-01-05"), "1"); + addToTimeline(Intervals.of("2015-02-04/2015-02-05"), "1"); final Druids.TimeseriesQueryBuilder builder = Druids.newTimeseriesQueryBuilder() .dataSource("test") @@ -145,7 +146,7 @@ public class CachingClusteredClientFunctionalityTest { List expectedList = Lists.newArrayListWithExpectedSize(intervals.length); for (String interval : intervals) { - expectedList.add(new Interval(interval)); + expectedList.add(Intervals.of(interval)); } Assert.assertEquals((Object) expectedList, context.get("uncoveredIntervals")); Assert.assertEquals(uncoveredIntervalsOverflowed, context.get("uncoveredIntervalsOverflowed")); diff --git a/server/src/test/java/io/druid/client/CachingClusteredClientTest.java b/server/src/test/java/io/druid/client/CachingClusteredClientTest.java index b764421048d..a68e5c573fe 100644 --- a/server/src/test/java/io/druid/client/CachingClusteredClientTest.java +++ b/server/src/test/java/io/druid/client/CachingClusteredClientTest.java @@ -55,7 +55,9 @@ import io.druid.data.input.MapBasedRow; import io.druid.data.input.Row; import io.druid.hll.HyperLogLogCollector; import 
io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.Pair; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.Granularities; @@ -465,19 +467,19 @@ public class CachingClusteredClientTest testQueryCaching( runner, builder.build(), - new Interval("2011-01-05/2011-01-10"), + Intervals.of("2011-01-05/2011-01-10"), makeTimeResults( - new DateTime("2011-01-05"), 85, 102, - new DateTime("2011-01-06"), 412, 521, - new DateTime("2011-01-07"), 122, 21894, - new DateTime("2011-01-08"), 5, 20, - new DateTime("2011-01-09"), 18, 521 + DateTimes.of("2011-01-05"), 85, 102, + DateTimes.of("2011-01-06"), 412, 521, + DateTimes.of("2011-01-07"), 122, 21894, + DateTimes.of("2011-01-08"), 5, 20, + DateTimes.of("2011-01-09"), 18, 521 ), - new Interval("2011-01-10/2011-01-13"), + Intervals.of("2011-01-10/2011-01-13"), makeTimeResults( - new DateTime("2011-01-10"), 85, 102, - new DateTime("2011-01-11"), 412, 521, - new DateTime("2011-01-12"), 122, 21894 + DateTimes.of("2011-01-10"), 85, 102, + DateTimes.of("2011-01-11"), 412, 521, + DateTimes.of("2011-01-12"), 122, 21894 ) ); } @@ -504,26 +506,26 @@ public class CachingClusteredClientTest testQueryCaching( runner, builder.build(), - new Interval("2011-01-01/2011-01-02"), makeTimeResults(new DateTime("2011-01-01"), 50, 5000), - new Interval("2011-01-02/2011-01-03"), makeTimeResults(new DateTime("2011-01-02"), 30, 6000), - new Interval("2011-01-04/2011-01-05"), makeTimeResults(new DateTime("2011-01-04"), 23, 85312), + Intervals.of("2011-01-01/2011-01-02"), makeTimeResults(DateTimes.of("2011-01-01"), 50, 5000), + Intervals.of("2011-01-02/2011-01-03"), makeTimeResults(DateTimes.of("2011-01-02"), 30, 6000), + Intervals.of("2011-01-04/2011-01-05"), makeTimeResults(DateTimes.of("2011-01-04"), 23, 85312), - new Interval("2011-01-05/2011-01-10"), + Intervals.of("2011-01-05/2011-01-10"), makeTimeResults( - new DateTime("2011-01-05"), 85, 102, - new DateTime("2011-01-06"), 412, 521, - new DateTime("2011-01-07"), 122, 21894, - new DateTime("2011-01-08"), 5, 20, - new DateTime("2011-01-09"), 18, 521 + DateTimes.of("2011-01-05"), 85, 102, + DateTimes.of("2011-01-06"), 412, 521, + DateTimes.of("2011-01-07"), 122, 21894, + DateTimes.of("2011-01-08"), 5, 20, + DateTimes.of("2011-01-09"), 18, 521 ), - new Interval("2011-01-05/2011-01-10"), + Intervals.of("2011-01-05/2011-01-10"), makeTimeResults( - new DateTime("2011-01-05T01"), 80, 100, - new DateTime("2011-01-06T01"), 420, 520, - new DateTime("2011-01-07T01"), 12, 2194, - new DateTime("2011-01-08T01"), 59, 201, - new DateTime("2011-01-09T01"), 181, 52 + DateTimes.of("2011-01-05T01"), 80, 100, + DateTimes.of("2011-01-06T01"), 420, 520, + DateTimes.of("2011-01-07T01"), 12, 2194, + DateTimes.of("2011-01-08T01"), 59, 201, + DateTimes.of("2011-01-09T01"), 181, 52 ) ); @@ -535,19 +537,19 @@ public class CachingClusteredClientTest .build(); TestHelper.assertExpectedResults( makeRenamedTimeResults( - new DateTime("2011-01-01"), 50, 5000, - new DateTime("2011-01-02"), 30, 6000, - new DateTime("2011-01-04"), 23, 85312, - new DateTime("2011-01-05"), 85, 102, - new DateTime("2011-01-05T01"), 80, 100, - new DateTime("2011-01-06"), 412, 521, - new DateTime("2011-01-06T01"), 420, 520, - new DateTime("2011-01-07"), 122, 21894, - new DateTime("2011-01-07T01"), 12, 2194, - new DateTime("2011-01-08"), 5, 20, - new DateTime("2011-01-08T01"), 59, 201, - new 
DateTime("2011-01-09"), 18, 521, - new DateTime("2011-01-09T01"), 181, 52 + DateTimes.of("2011-01-01"), 50, 5000, + DateTimes.of("2011-01-02"), 30, 6000, + DateTimes.of("2011-01-04"), 23, 85312, + DateTimes.of("2011-01-05"), 85, 102, + DateTimes.of("2011-01-05T01"), 80, 100, + DateTimes.of("2011-01-06"), 412, 521, + DateTimes.of("2011-01-06T01"), 420, 520, + DateTimes.of("2011-01-07"), 122, 21894, + DateTimes.of("2011-01-07T01"), 12, 2194, + DateTimes.of("2011-01-08"), 5, 20, + DateTimes.of("2011-01-08T01"), 59, 201, + DateTimes.of("2011-01-09"), 18, 521, + DateTimes.of("2011-01-09T01"), 181, 52 ), runner.run(QueryPlus.wrap(query), context) ); @@ -559,7 +561,7 @@ public class CachingClusteredClientTest public void testCachingOverBulkLimitEnforcesLimit() throws Exception { final int limit = 10; - final Interval interval = new Interval("2011-01-01/2011-01-02"); + final Interval interval = Intervals.of("2011-01-01/2011-01-02"); final TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() .dataSource(DATA_SOURCE) .intervals(new MultipleIntervalSegmentSpec(ImmutableList.of(interval))) @@ -631,21 +633,21 @@ public class CachingClusteredClientTest testQueryCaching( runner, builder.build(), - new Interval("2011-01-05/2011-01-10"), + Intervals.of("2011-01-05/2011-01-10"), makeTimeResults( - new DateTime("2011-01-05T02"), 80, 100, - new DateTime("2011-01-06T02"), 420, 520, - new DateTime("2011-01-07T02"), 12, 2194, - new DateTime("2011-01-08T02"), 59, 201, - new DateTime("2011-01-09T02"), 181, 52 + DateTimes.of("2011-01-05T02"), 80, 100, + DateTimes.of("2011-01-06T02"), 420, 520, + DateTimes.of("2011-01-07T02"), 12, 2194, + DateTimes.of("2011-01-08T02"), 59, 201, + DateTimes.of("2011-01-09T02"), 181, 52 ), - new Interval("2011-01-05/2011-01-10"), + Intervals.of("2011-01-05/2011-01-10"), makeTimeResults( - new DateTime("2011-01-05T00"), 85, 102, - new DateTime("2011-01-06T00"), 412, 521, - new DateTime("2011-01-07T00"), 122, 21894, - new DateTime("2011-01-08T00"), 5, 20, - new DateTime("2011-01-09T00"), 18, 521 + DateTimes.of("2011-01-05T00"), 85, 102, + DateTimes.of("2011-01-06T00"), 412, 521, + DateTimes.of("2011-01-07T00"), 122, 21894, + DateTimes.of("2011-01-08T00"), 5, 20, + DateTimes.of("2011-01-09T00"), 18, 521 ) ); @@ -656,16 +658,16 @@ public class CachingClusteredClientTest .build(); TestHelper.assertExpectedResults( makeRenamedTimeResults( - new DateTime("2011-01-05T00"), 85, 102, - new DateTime("2011-01-05T02"), 80, 100, - new DateTime("2011-01-06T00"), 412, 521, - new DateTime("2011-01-06T02"), 420, 520, - new DateTime("2011-01-07T00"), 122, 21894, - new DateTime("2011-01-07T02"), 12, 2194, - new DateTime("2011-01-08T00"), 5, 20, - new DateTime("2011-01-08T02"), 59, 201, - new DateTime("2011-01-09T00"), 18, 521, - new DateTime("2011-01-09T02"), 181, 52 + DateTimes.of("2011-01-05T00"), 85, 102, + DateTimes.of("2011-01-05T02"), 80, 100, + DateTimes.of("2011-01-06T00"), 412, 521, + DateTimes.of("2011-01-06T02"), 420, 520, + DateTimes.of("2011-01-07T00"), 122, 21894, + DateTimes.of("2011-01-07T02"), 12, 2194, + DateTimes.of("2011-01-08T00"), 5, 20, + DateTimes.of("2011-01-08T02"), 59, 201, + DateTimes.of("2011-01-09T00"), 18, 521, + DateTimes.of("2011-01-09T02"), 181, 52 ), runner.run(QueryPlus.wrap(query), Maps.newHashMap()) ); @@ -693,7 +695,7 @@ public class CachingClusteredClientTest testQueryCaching( runner, builder.build(), - new Interval("2011-11-04/2011-11-08"), + Intervals.of("2011-11-04/2011-11-08"), makeTimeResults( new DateTime("2011-11-04", TIMEZONE), 50, 5000, new 
DateTime("2011-11-05", TIMEZONE), 30, 6000, @@ -744,7 +746,7 @@ public class CachingClusteredClientTest "populateCache", "true" ) ).build(), - new Interval("2011-01-01/2011-01-02"), makeTimeResults(new DateTime("2011-01-01"), 50, 5000) + Intervals.of("2011-01-01/2011-01-02"), makeTimeResults(DateTimes.of("2011-01-01"), 50, 5000) ); Assert.assertEquals(1, cache.getStats().getNumEntries()); @@ -763,7 +765,7 @@ public class CachingClusteredClientTest "populateCache", "false" ) ).build(), - new Interval("2011-01-01/2011-01-02"), makeTimeResults(new DateTime("2011-01-01"), 50, 5000) + Intervals.of("2011-01-01/2011-01-02"), makeTimeResults(DateTimes.of("2011-01-01"), 50, 5000) ); Assert.assertEquals(0, cache.getStats().getNumEntries()); @@ -780,7 +782,7 @@ public class CachingClusteredClientTest "populateCache", "false" ) ).build(), - new Interval("2011-01-01/2011-01-02"), makeTimeResults(new DateTime("2011-01-01"), 50, 5000) + Intervals.of("2011-01-01/2011-01-02"), makeTimeResults(DateTimes.of("2011-01-01"), 50, 5000) ); Assert.assertEquals(0, cache.getStats().getNumEntries()); @@ -815,28 +817,28 @@ public class CachingClusteredClientTest testQueryCaching( runner, builder.build(), - new Interval("2011-01-01/2011-01-02"), - makeTopNResultsWithoutRename(new DateTime("2011-01-01"), "a", 50, 5000, "b", 50, 4999, "c", 50, 4998), + Intervals.of("2011-01-01/2011-01-02"), + makeTopNResultsWithoutRename(DateTimes.of("2011-01-01"), "a", 50, 5000, "b", 50, 4999, "c", 50, 4998), - new Interval("2011-01-02/2011-01-03"), - makeTopNResultsWithoutRename(new DateTime("2011-01-02"), "a", 50, 4997, "b", 50, 4996, "c", 50, 4995), + Intervals.of("2011-01-02/2011-01-03"), + makeTopNResultsWithoutRename(DateTimes.of("2011-01-02"), "a", 50, 4997, "b", 50, 4996, "c", 50, 4995), - new Interval("2011-01-05/2011-01-10"), + Intervals.of("2011-01-05/2011-01-10"), makeTopNResultsWithoutRename( - new DateTime("2011-01-05"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992, - new DateTime("2011-01-06"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, - new DateTime("2011-01-07"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, - new DateTime("2011-01-08"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986, - new DateTime("2011-01-09"), "c1", 50, 4985, "b", 50, 4984, "c", 50, 4983 + DateTimes.of("2011-01-05"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992, + DateTimes.of("2011-01-06"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, + DateTimes.of("2011-01-07"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, + DateTimes.of("2011-01-08"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986, + DateTimes.of("2011-01-09"), "c1", 50, 4985, "b", 50, 4984, "c", 50, 4983 ), - new Interval("2011-01-05/2011-01-10"), + Intervals.of("2011-01-05/2011-01-10"), makeTopNResultsWithoutRename( - new DateTime("2011-01-05T01"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992, - new DateTime("2011-01-06T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, - new DateTime("2011-01-07T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, - new DateTime("2011-01-08T01"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986, - new DateTime("2011-01-09T01"), "c2", 50, 4985, "b", 50, 4984, "c", 50, 4983 + DateTimes.of("2011-01-05T01"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992, + DateTimes.of("2011-01-06T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, + DateTimes.of("2011-01-07T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, + DateTimes.of("2011-01-08T01"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986, + DateTimes.of("2011-01-09T01"), "c2", 50, 4985, "b", 50, 4984, "c", 50, 4983 ) ); HashMap 
context = new HashMap(); @@ -848,18 +850,18 @@ public class CachingClusteredClientTest .build(); TestHelper.assertExpectedResults( makeRenamedTopNResults( - new DateTime("2011-01-01"), "a", 50, 5000, "b", 50, 4999, "c", 50, 4998, - new DateTime("2011-01-02"), "a", 50, 4997, "b", 50, 4996, "c", 50, 4995, - new DateTime("2011-01-05"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992, - new DateTime("2011-01-05T01"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992, - new DateTime("2011-01-06"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, - new DateTime("2011-01-06T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, - new DateTime("2011-01-07"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, - new DateTime("2011-01-07T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, - new DateTime("2011-01-08"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986, - new DateTime("2011-01-08T01"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986, - new DateTime("2011-01-09"), "c1", 50, 4985, "b", 50, 4984, "c", 50, 4983, - new DateTime("2011-01-09T01"), "c2", 50, 4985, "b", 50, 4984, "c", 50, 4983 + DateTimes.of("2011-01-01"), "a", 50, 5000, "b", 50, 4999, "c", 50, 4998, + DateTimes.of("2011-01-02"), "a", 50, 4997, "b", 50, 4996, "c", 50, 4995, + DateTimes.of("2011-01-05"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992, + DateTimes.of("2011-01-05T01"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992, + DateTimes.of("2011-01-06"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, + DateTimes.of("2011-01-06T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, + DateTimes.of("2011-01-07"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, + DateTimes.of("2011-01-07T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, + DateTimes.of("2011-01-08"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986, + DateTimes.of("2011-01-08T01"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986, + DateTimes.of("2011-01-09"), "c1", 50, 4985, "b", 50, 4984, "c", 50, 4983, + DateTimes.of("2011-01-09T01"), "c2", 50, 4985, "b", 50, 4984, "c", 50, 4983 ), runner.run(QueryPlus.wrap(query), context) ); @@ -891,7 +893,7 @@ public class CachingClusteredClientTest testQueryCaching( runner, builder.build(), - new Interval("2011-11-04/2011-11-08"), + Intervals.of("2011-11-04/2011-11-08"), makeTopNResultsWithoutRename( new DateTime("2011-11-04", TIMEZONE), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992, new DateTime("2011-11-05", TIMEZONE), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, @@ -925,30 +927,30 @@ public class CachingClusteredClientTest ImmutableList.of( Sequences.simple( makeTopNResultsWithoutRename( - new DateTime("2011-01-07"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, - new DateTime("2011-01-08"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986, - new DateTime("2011-01-09"), "a", 50, 4985, "b", 50, 4984, "c", 50, 4983 + DateTimes.of("2011-01-07"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, + DateTimes.of("2011-01-08"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986, + DateTimes.of("2011-01-09"), "a", 50, 4985, "b", 50, 4984, "c", 50, 4983 ) ), Sequences.simple( makeTopNResultsWithoutRename( - new DateTime("2011-01-06T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, - new DateTime("2011-01-07T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, - new DateTime("2011-01-08T01"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986, - new DateTime("2011-01-09T01"), "a", 50, 4985, "b", 50, 4984, "c", 50, 4983 + DateTimes.of("2011-01-06T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, + DateTimes.of("2011-01-07T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, + DateTimes.of("2011-01-08T01"), "a", 50, 
4988, "b", 50, 4987, "c", 50, 4986, + DateTimes.of("2011-01-09T01"), "a", 50, 4985, "b", 50, 4984, "c", 50, 4983 ) ) ); TestHelper.assertExpectedResults( makeTopNResultsWithoutRename( - new DateTime("2011-01-06T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, - new DateTime("2011-01-07"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, - new DateTime("2011-01-07T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, - new DateTime("2011-01-08"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986, - new DateTime("2011-01-08T01"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986, - new DateTime("2011-01-09"), "a", 50, 4985, "b", 50, 4984, "c", 50, 4983, - new DateTime("2011-01-09T01"), "a", 50, 4985, "b", 50, 4984, "c", 50, 4983 + DateTimes.of("2011-01-06T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, + DateTimes.of("2011-01-07"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, + DateTimes.of("2011-01-07T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, + DateTimes.of("2011-01-08"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986, + DateTimes.of("2011-01-08T01"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986, + DateTimes.of("2011-01-09"), "a", 50, 4985, "b", 50, 4984, "c", 50, 4983, + DateTimes.of("2011-01-09T01"), "a", 50, 4985, "b", 50, 4984, "c", 50, 4983 ), mergeSequences( new TopNQueryBuilder() @@ -995,28 +997,28 @@ public class CachingClusteredClientTest testQueryCaching( runner, builder.build(), - new Interval("2011-01-01/2011-01-02"), + Intervals.of("2011-01-01/2011-01-02"), makeTopNResultsWithoutRename(), - new Interval("2011-01-02/2011-01-03"), + Intervals.of("2011-01-02/2011-01-03"), makeTopNResultsWithoutRename(), - new Interval("2011-01-05/2011-01-10"), + Intervals.of("2011-01-05/2011-01-10"), makeTopNResultsWithoutRename( - new DateTime("2011-01-05"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992, - new DateTime("2011-01-06"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, - new DateTime("2011-01-07"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, - new DateTime("2011-01-08"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986, - new DateTime("2011-01-09"), "a", 50, 4985, "b", 50, 4984, "c", 50, 4983 + DateTimes.of("2011-01-05"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992, + DateTimes.of("2011-01-06"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, + DateTimes.of("2011-01-07"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, + DateTimes.of("2011-01-08"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986, + DateTimes.of("2011-01-09"), "a", 50, 4985, "b", 50, 4984, "c", 50, 4983 ), - new Interval("2011-01-05/2011-01-10"), + Intervals.of("2011-01-05/2011-01-10"), makeTopNResultsWithoutRename( - new DateTime("2011-01-05T01"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992, - new DateTime("2011-01-06T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, - new DateTime("2011-01-07T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, - new DateTime("2011-01-08T01"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986, - new DateTime("2011-01-09T01"), "a", 50, 4985, "b", 50, 4984, "c", 50, 4983 + DateTimes.of("2011-01-05T01"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992, + DateTimes.of("2011-01-06T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, + DateTimes.of("2011-01-07T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, + DateTimes.of("2011-01-08T01"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986, + DateTimes.of("2011-01-09T01"), "a", 50, 4985, "b", 50, 4984, "c", 50, 4983 ) ); @@ -1029,16 +1031,16 @@ public class CachingClusteredClientTest .build(); TestHelper.assertExpectedResults( makeRenamedTopNResults( - new DateTime("2011-01-05"), 
"a", 50, 4994, "b", 50, 4993, "c", 50, 4992, - new DateTime("2011-01-05T01"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992, - new DateTime("2011-01-06"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, - new DateTime("2011-01-06T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, - new DateTime("2011-01-07"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, - new DateTime("2011-01-07T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, - new DateTime("2011-01-08"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986, - new DateTime("2011-01-08T01"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986, - new DateTime("2011-01-09"), "a", 50, 4985, "b", 50, 4984, "c", 50, 4983, - new DateTime("2011-01-09T01"), "a", 50, 4985, "b", 50, 4984, "c", 50, 4983 + DateTimes.of("2011-01-05"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992, + DateTimes.of("2011-01-05T01"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992, + DateTimes.of("2011-01-06"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, + DateTimes.of("2011-01-06T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, + DateTimes.of("2011-01-07"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, + DateTimes.of("2011-01-07T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, + DateTimes.of("2011-01-08"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986, + DateTimes.of("2011-01-08T01"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986, + DateTimes.of("2011-01-09"), "a", 50, 4985, "b", 50, 4984, "c", 50, 4983, + DateTimes.of("2011-01-09T01"), "a", 50, 4985, "b", 50, 4984, "c", 50, 4983 ), runner.run(QueryPlus.wrap(query), context) ); @@ -1068,28 +1070,28 @@ public class CachingClusteredClientTest testQueryCaching( runner, builder.build(), - new Interval("2011-01-01/2011-01-02"), + Intervals.of("2011-01-01/2011-01-02"), makeTopNResultsWithoutRename(), - new Interval("2011-01-02/2011-01-03"), + Intervals.of("2011-01-02/2011-01-03"), makeTopNResultsWithoutRename(), - new Interval("2011-01-05/2011-01-10"), + Intervals.of("2011-01-05/2011-01-10"), makeTopNResultsWithoutRename( - new DateTime("2011-01-05"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992, - new DateTime("2011-01-06"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, - new DateTime("2011-01-07"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, - new DateTime("2011-01-08"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986, - new DateTime("2011-01-09"), "c1", 50, 4985, "b", 50, 4984, "c", 50, 4983 + DateTimes.of("2011-01-05"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992, + DateTimes.of("2011-01-06"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, + DateTimes.of("2011-01-07"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, + DateTimes.of("2011-01-08"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986, + DateTimes.of("2011-01-09"), "c1", 50, 4985, "b", 50, 4984, "c", 50, 4983 ), - new Interval("2011-01-05/2011-01-10"), + Intervals.of("2011-01-05/2011-01-10"), makeTopNResultsWithoutRename( - new DateTime("2011-01-05T01"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992, - new DateTime("2011-01-06T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, - new DateTime("2011-01-07T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, - new DateTime("2011-01-08T01"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986, - new DateTime("2011-01-09T01"), "c2", 50, 4985, "b", 50, 4984, "c", 50, 4983 + DateTimes.of("2011-01-05T01"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992, + DateTimes.of("2011-01-06T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, + DateTimes.of("2011-01-07T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, + DateTimes.of("2011-01-08T01"), "a", 50, 4988, "b", 50, 4987, "c", 50, 
4986, + DateTimes.of("2011-01-09T01"), "c2", 50, 4985, "b", 50, 4984, "c", 50, 4983 ) ); @@ -1102,16 +1104,16 @@ public class CachingClusteredClientTest .build(); TestHelper.assertExpectedResults( makeTopNResultsWithoutRename( - new DateTime("2011-01-05"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992, - new DateTime("2011-01-05T01"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992, - new DateTime("2011-01-06"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, - new DateTime("2011-01-06T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, - new DateTime("2011-01-07"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, - new DateTime("2011-01-07T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, - new DateTime("2011-01-08"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986, - new DateTime("2011-01-08T01"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986, - new DateTime("2011-01-09"), "c1", 50, 4985, "b", 50, 4984, "c", 50, 4983, - new DateTime("2011-01-09T01"), "c2", 50, 4985, "b", 50, 4984, "c", 50, 4983 + DateTimes.of("2011-01-05"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992, + DateTimes.of("2011-01-05T01"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992, + DateTimes.of("2011-01-06"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, + DateTimes.of("2011-01-06T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, + DateTimes.of("2011-01-07"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, + DateTimes.of("2011-01-07T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, + DateTimes.of("2011-01-08"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986, + DateTimes.of("2011-01-08T01"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986, + DateTimes.of("2011-01-09"), "c1", 50, 4985, "b", 50, 4984, "c", 50, 4983, + DateTimes.of("2011-01-09T01"), "c2", 50, 4985, "b", 50, 4984, "c", 50, 4983 ), runner.run(QueryPlus.wrap(query), context) ); @@ -1133,30 +1135,30 @@ public class CachingClusteredClientTest testQueryCaching( getDefaultQueryRunner(), builder.build(), - new Interval("2011-01-01/2011-01-02"), - makeSearchResults(TOP_DIM, new DateTime("2011-01-01"), "how", 1, "howdy", 2, "howwwwww", 3, "howwy", 4), + Intervals.of("2011-01-01/2011-01-02"), + makeSearchResults(TOP_DIM, DateTimes.of("2011-01-01"), "how", 1, "howdy", 2, "howwwwww", 3, "howwy", 4), - new Interval("2011-01-02/2011-01-03"), - makeSearchResults(TOP_DIM, new DateTime("2011-01-02"), "how1", 1, "howdy1", 2, "howwwwww1", 3, "howwy1", 4), + Intervals.of("2011-01-02/2011-01-03"), + makeSearchResults(TOP_DIM, DateTimes.of("2011-01-02"), "how1", 1, "howdy1", 2, "howwwwww1", 3, "howwy1", 4), - new Interval("2011-01-05/2011-01-10"), + Intervals.of("2011-01-05/2011-01-10"), makeSearchResults( TOP_DIM, - new DateTime("2011-01-05"), "how2", 1, "howdy2", 2, "howwwwww2", 3, "howww2", 4, - new DateTime("2011-01-06"), "how3", 1, "howdy3", 2, "howwwwww3", 3, "howww3", 4, - new DateTime("2011-01-07"), "how4", 1, "howdy4", 2, "howwwwww4", 3, "howww4", 4, - new DateTime("2011-01-08"), "how5", 1, "howdy5", 2, "howwwwww5", 3, "howww5", 4, - new DateTime("2011-01-09"), "how6", 1, "howdy6", 2, "howwwwww6", 3, "howww6", 4 + DateTimes.of("2011-01-05"), "how2", 1, "howdy2", 2, "howwwwww2", 3, "howww2", 4, + DateTimes.of("2011-01-06"), "how3", 1, "howdy3", 2, "howwwwww3", 3, "howww3", 4, + DateTimes.of("2011-01-07"), "how4", 1, "howdy4", 2, "howwwwww4", 3, "howww4", 4, + DateTimes.of("2011-01-08"), "how5", 1, "howdy5", 2, "howwwwww5", 3, "howww5", 4, + DateTimes.of("2011-01-09"), "how6", 1, "howdy6", 2, "howwwwww6", 3, "howww6", 4 ), - new Interval("2011-01-05/2011-01-10"), + Intervals.of("2011-01-05/2011-01-10"), 
makeSearchResults( TOP_DIM, - new DateTime("2011-01-05T01"), "how2", 1, "howdy2", 2, "howwwwww2", 3, "howww2", 4, - new DateTime("2011-01-06T01"), "how3", 1, "howdy3", 2, "howwwwww3", 3, "howww3", 4, - new DateTime("2011-01-07T01"), "how4", 1, "howdy4", 2, "howwwwww4", 3, "howww4", 4, - new DateTime("2011-01-08T01"), "how5", 1, "howdy5", 2, "howwwwww5", 3, "howww5", 4, - new DateTime("2011-01-09T01"), "how6", 1, "howdy6", 2, "howwwwww6", 3, "howww6", 4 + DateTimes.of("2011-01-05T01"), "how2", 1, "howdy2", 2, "howwwwww2", 3, "howww2", 4, + DateTimes.of("2011-01-06T01"), "how3", 1, "howdy3", 2, "howwwwww3", 3, "howww3", 4, + DateTimes.of("2011-01-07T01"), "how4", 1, "howdy4", 2, "howwwwww4", 3, "howww4", 4, + DateTimes.of("2011-01-08T01"), "how5", 1, "howdy5", 2, "howwwwww5", 3, "howww5", 4, + DateTimes.of("2011-01-09T01"), "how6", 1, "howdy6", 2, "howwwwww6", 3, "howww6", 4 ) ); @@ -1170,18 +1172,18 @@ public class CachingClusteredClientTest TestHelper.assertExpectedResults( makeSearchResults( TOP_DIM, - new DateTime("2011-01-01"), "how", 1, "howdy", 2, "howwwwww", 3, "howwy", 4, - new DateTime("2011-01-02"), "how1", 1, "howdy1", 2, "howwwwww1", 3, "howwy1", 4, - new DateTime("2011-01-05"), "how2", 1, "howdy2", 2, "howwwwww2", 3, "howww2", 4, - new DateTime("2011-01-05T01"), "how2", 1, "howdy2", 2, "howwwwww2", 3, "howww2", 4, - new DateTime("2011-01-06"), "how3", 1, "howdy3", 2, "howwwwww3", 3, "howww3", 4, - new DateTime("2011-01-06T01"), "how3", 1, "howdy3", 2, "howwwwww3", 3, "howww3", 4, - new DateTime("2011-01-07"), "how4", 1, "howdy4", 2, "howwwwww4", 3, "howww4", 4, - new DateTime("2011-01-07T01"), "how4", 1, "howdy4", 2, "howwwwww4", 3, "howww4", 4, - new DateTime("2011-01-08"), "how5", 1, "howdy5", 2, "howwwwww5", 3, "howww5", 4, - new DateTime("2011-01-08T01"), "how5", 1, "howdy5", 2, "howwwwww5", 3, "howww5", 4, - new DateTime("2011-01-09"), "how6", 1, "howdy6", 2, "howwwwww6", 3, "howww6", 4, - new DateTime("2011-01-09T01"), "how6", 1, "howdy6", 2, "howwwwww6", 3, "howww6", 4 + DateTimes.of("2011-01-01"), "how", 1, "howdy", 2, "howwwwww", 3, "howwy", 4, + DateTimes.of("2011-01-02"), "how1", 1, "howdy1", 2, "howwwwww1", 3, "howwy1", 4, + DateTimes.of("2011-01-05"), "how2", 1, "howdy2", 2, "howwwwww2", 3, "howww2", 4, + DateTimes.of("2011-01-05T01"), "how2", 1, "howdy2", 2, "howwwwww2", 3, "howww2", 4, + DateTimes.of("2011-01-06"), "how3", 1, "howdy3", 2, "howwwwww3", 3, "howww3", 4, + DateTimes.of("2011-01-06T01"), "how3", 1, "howdy3", 2, "howwwwww3", 3, "howww3", 4, + DateTimes.of("2011-01-07"), "how4", 1, "howdy4", 2, "howwwwww4", 3, "howww4", 4, + DateTimes.of("2011-01-07T01"), "how4", 1, "howdy4", 2, "howwwwww4", 3, "howww4", 4, + DateTimes.of("2011-01-08"), "how5", 1, "howdy5", 2, "howwwwww5", 3, "howww5", 4, + DateTimes.of("2011-01-08T01"), "how5", 1, "howdy5", 2, "howwwwww5", 3, "howww5", 4, + DateTimes.of("2011-01-09"), "how6", 1, "howdy6", 2, "howwwwww6", 3, "howww6", 4, + DateTimes.of("2011-01-09T01"), "how6", 1, "howdy6", 2, "howwwwww6", 3, "howww6", 4 ), runner.run(QueryPlus.wrap(builder.intervals("2011-01-01/2011-01-10").build()), context) ); @@ -1203,30 +1205,30 @@ public class CachingClusteredClientTest testQueryCaching( getDefaultQueryRunner(), builder.build(), - new Interval("2011-01-01/2011-01-02"), - makeSearchResults(TOP_DIM, new DateTime("2011-01-01"), "how", 1, "howdy", 2, "howwwwww", 3, "howwy", 4), + Intervals.of("2011-01-01/2011-01-02"), + makeSearchResults(TOP_DIM, DateTimes.of("2011-01-01"), "how", 1, "howdy", 2, "howwwwww", 3, "howwy", 4), - new 
Interval("2011-01-02/2011-01-03"), - makeSearchResults(TOP_DIM, new DateTime("2011-01-02"), "how1", 1, "howdy1", 2, "howwwwww1", 3, "howwy1", 4), + Intervals.of("2011-01-02/2011-01-03"), + makeSearchResults(TOP_DIM, DateTimes.of("2011-01-02"), "how1", 1, "howdy1", 2, "howwwwww1", 3, "howwy1", 4), - new Interval("2011-01-05/2011-01-10"), + Intervals.of("2011-01-05/2011-01-10"), makeSearchResults( TOP_DIM, - new DateTime("2011-01-05"), "how2", 1, "howdy2", 2, "howwwwww2", 3, "howww2", 4, - new DateTime("2011-01-06"), "how3", 1, "howdy3", 2, "howwwwww3", 3, "howww3", 4, - new DateTime("2011-01-07"), "how4", 1, "howdy4", 2, "howwwwww4", 3, "howww4", 4, - new DateTime("2011-01-08"), "how5", 1, "howdy5", 2, "howwwwww5", 3, "howww5", 4, - new DateTime("2011-01-09"), "how6", 1, "howdy6", 2, "howwwwww6", 3, "howww6", 4 + DateTimes.of("2011-01-05"), "how2", 1, "howdy2", 2, "howwwwww2", 3, "howww2", 4, + DateTimes.of("2011-01-06"), "how3", 1, "howdy3", 2, "howwwwww3", 3, "howww3", 4, + DateTimes.of("2011-01-07"), "how4", 1, "howdy4", 2, "howwwwww4", 3, "howww4", 4, + DateTimes.of("2011-01-08"), "how5", 1, "howdy5", 2, "howwwwww5", 3, "howww5", 4, + DateTimes.of("2011-01-09"), "how6", 1, "howdy6", 2, "howwwwww6", 3, "howww6", 4 ), - new Interval("2011-01-05/2011-01-10"), + Intervals.of("2011-01-05/2011-01-10"), makeSearchResults( TOP_DIM, - new DateTime("2011-01-05T01"), "how2", 1, "howdy2", 2, "howwwwww2", 3, "howww2", 4, - new DateTime("2011-01-06T01"), "how3", 1, "howdy3", 2, "howwwwww3", 3, "howww3", 4, - new DateTime("2011-01-07T01"), "how4", 1, "howdy4", 2, "howwwwww4", 3, "howww4", 4, - new DateTime("2011-01-08T01"), "how5", 1, "howdy5", 2, "howwwwww5", 3, "howww5", 4, - new DateTime("2011-01-09T01"), "how6", 1, "howdy6", 2, "howwwwww6", 3, "howww6", 4 + DateTimes.of("2011-01-05T01"), "how2", 1, "howdy2", 2, "howwwwww2", 3, "howww2", 4, + DateTimes.of("2011-01-06T01"), "how3", 1, "howdy3", 2, "howwwwww3", 3, "howww3", 4, + DateTimes.of("2011-01-07T01"), "how4", 1, "howdy4", 2, "howwwwww4", 3, "howww4", 4, + DateTimes.of("2011-01-08T01"), "how5", 1, "howdy5", 2, "howwwwww5", 3, "howww5", 4, + DateTimes.of("2011-01-09T01"), "how6", 1, "howdy6", 2, "howwwwww6", 3, "howww6", 4 ) ); @@ -1240,18 +1242,18 @@ public class CachingClusteredClientTest TestHelper.assertExpectedResults( makeSearchResults( TOP_DIM, - new DateTime("2011-01-01"), "how", 1, "howdy", 2, "howwwwww", 3, "howwy", 4, - new DateTime("2011-01-02"), "how1", 1, "howdy1", 2, "howwwwww1", 3, "howwy1", 4, - new DateTime("2011-01-05"), "how2", 1, "howdy2", 2, "howwwwww2", 3, "howww2", 4, - new DateTime("2011-01-05T01"), "how2", 1, "howdy2", 2, "howwwwww2", 3, "howww2", 4, - new DateTime("2011-01-06"), "how3", 1, "howdy3", 2, "howwwwww3", 3, "howww3", 4, - new DateTime("2011-01-06T01"), "how3", 1, "howdy3", 2, "howwwwww3", 3, "howww3", 4, - new DateTime("2011-01-07"), "how4", 1, "howdy4", 2, "howwwwww4", 3, "howww4", 4, - new DateTime("2011-01-07T01"), "how4", 1, "howdy4", 2, "howwwwww4", 3, "howww4", 4, - new DateTime("2011-01-08"), "how5", 1, "howdy5", 2, "howwwwww5", 3, "howww5", 4, - new DateTime("2011-01-08T01"), "how5", 1, "howdy5", 2, "howwwwww5", 3, "howww5", 4, - new DateTime("2011-01-09"), "how6", 1, "howdy6", 2, "howwwwww6", 3, "howww6", 4, - new DateTime("2011-01-09T01"), "how6", 1, "howdy6", 2, "howwwwww6", 3, "howww6", 4 + DateTimes.of("2011-01-01"), "how", 1, "howdy", 2, "howwwwww", 3, "howwy", 4, + DateTimes.of("2011-01-02"), "how1", 1, "howdy1", 2, "howwwwww1", 3, "howwy1", 4, + DateTimes.of("2011-01-05"), "how2", 1, 
"howdy2", 2, "howwwwww2", 3, "howww2", 4, + DateTimes.of("2011-01-05T01"), "how2", 1, "howdy2", 2, "howwwwww2", 3, "howww2", 4, + DateTimes.of("2011-01-06"), "how3", 1, "howdy3", 2, "howwwwww3", 3, "howww3", 4, + DateTimes.of("2011-01-06T01"), "how3", 1, "howdy3", 2, "howwwwww3", 3, "howww3", 4, + DateTimes.of("2011-01-07"), "how4", 1, "howdy4", 2, "howwwwww4", 3, "howww4", 4, + DateTimes.of("2011-01-07T01"), "how4", 1, "howdy4", 2, "howwwwww4", 3, "howww4", 4, + DateTimes.of("2011-01-08"), "how5", 1, "howdy5", 2, "howwwwww5", 3, "howww5", 4, + DateTimes.of("2011-01-08T01"), "how5", 1, "howdy5", 2, "howwwwww5", 3, "howww5", 4, + DateTimes.of("2011-01-09"), "how6", 1, "howdy6", 2, "howwwwww6", 3, "howww6", 4, + DateTimes.of("2011-01-09T01"), "how6", 1, "howdy6", 2, "howwwwww6", 3, "howww6", 4 ), runner.run(QueryPlus.wrap(builder.intervals("2011-01-01/2011-01-10").build()), context) ); @@ -1262,18 +1264,18 @@ public class CachingClusteredClientTest TestHelper.assertExpectedResults( makeSearchResults( "new_dim", - new DateTime("2011-01-01"), "how", 1, "howdy", 2, "howwwwww", 3, "howwy", 4, - new DateTime("2011-01-02"), "how1", 1, "howdy1", 2, "howwwwww1", 3, "howwy1", 4, - new DateTime("2011-01-05"), "how2", 1, "howdy2", 2, "howwwwww2", 3, "howww2", 4, - new DateTime("2011-01-05T01"), "how2", 1, "howdy2", 2, "howwwwww2", 3, "howww2", 4, - new DateTime("2011-01-06"), "how3", 1, "howdy3", 2, "howwwwww3", 3, "howww3", 4, - new DateTime("2011-01-06T01"), "how3", 1, "howdy3", 2, "howwwwww3", 3, "howww3", 4, - new DateTime("2011-01-07"), "how4", 1, "howdy4", 2, "howwwwww4", 3, "howww4", 4, - new DateTime("2011-01-07T01"), "how4", 1, "howdy4", 2, "howwwwww4", 3, "howww4", 4, - new DateTime("2011-01-08"), "how5", 1, "howdy5", 2, "howwwwww5", 3, "howww5", 4, - new DateTime("2011-01-08T01"), "how5", 1, "howdy5", 2, "howwwwww5", 3, "howww5", 4, - new DateTime("2011-01-09"), "how6", 1, "howdy6", 2, "howwwwww6", 3, "howww6", 4, - new DateTime("2011-01-09T01"), "how6", 1, "howdy6", 2, "howwwwww6", 3, "howww6", 4 + DateTimes.of("2011-01-01"), "how", 1, "howdy", 2, "howwwwww", 3, "howwy", 4, + DateTimes.of("2011-01-02"), "how1", 1, "howdy1", 2, "howwwwww1", 3, "howwy1", 4, + DateTimes.of("2011-01-05"), "how2", 1, "howdy2", 2, "howwwwww2", 3, "howww2", 4, + DateTimes.of("2011-01-05T01"), "how2", 1, "howdy2", 2, "howwwwww2", 3, "howww2", 4, + DateTimes.of("2011-01-06"), "how3", 1, "howdy3", 2, "howwwwww3", 3, "howww3", 4, + DateTimes.of("2011-01-06T01"), "how3", 1, "howdy3", 2, "howwwwww3", 3, "howww3", 4, + DateTimes.of("2011-01-07"), "how4", 1, "howdy4", 2, "howwwwww4", 3, "howww4", 4, + DateTimes.of("2011-01-07T01"), "how4", 1, "howdy4", 2, "howwwwww4", 3, "howww4", 4, + DateTimes.of("2011-01-08"), "how5", 1, "howdy5", 2, "howwwwww5", 3, "howww5", 4, + DateTimes.of("2011-01-08T01"), "how5", 1, "howdy5", 2, "howwwwww5", 3, "howww5", 4, + DateTimes.of("2011-01-09"), "how6", 1, "howdy6", 2, "howwwwww6", 3, "howww6", 4, + DateTimes.of("2011-01-09T01"), "how6", 1, "howdy6", 2, "howwwwww6", 3, "howww6", 4 ), runner.run(QueryPlus.wrap(query), context) ); @@ -1298,26 +1300,26 @@ public class CachingClusteredClientTest testQueryCaching( getDefaultQueryRunner(), builder.build(), - new Interval("2011-01-01/2011-01-02"), - makeSelectResults(dimensions, metrics, new DateTime("2011-01-01"), ImmutableMap.of("a", "b", "rows", 1)), + Intervals.of("2011-01-01/2011-01-02"), + makeSelectResults(dimensions, metrics, DateTimes.of("2011-01-01"), ImmutableMap.of("a", "b", "rows", 1)), - new Interval("2011-01-02/2011-01-03"), - 
makeSelectResults(dimensions, metrics, new DateTime("2011-01-02"), ImmutableMap.of("a", "c", "rows", 5)), + Intervals.of("2011-01-02/2011-01-03"), + makeSelectResults(dimensions, metrics, DateTimes.of("2011-01-02"), ImmutableMap.of("a", "c", "rows", 5)), - new Interval("2011-01-05/2011-01-10"), - makeSelectResults(dimensions, metrics, new DateTime("2011-01-05"), ImmutableMap.of("a", "d", "rows", 5), - new DateTime("2011-01-06"), ImmutableMap.of("a", "e", "rows", 6), - new DateTime("2011-01-07"), ImmutableMap.of("a", "f", "rows", 7), - new DateTime("2011-01-08"), ImmutableMap.of("a", "g", "rows", 8), - new DateTime("2011-01-09"), ImmutableMap.of("a", "h", "rows", 9) + Intervals.of("2011-01-05/2011-01-10"), + makeSelectResults(dimensions, metrics, DateTimes.of("2011-01-05"), ImmutableMap.of("a", "d", "rows", 5), + DateTimes.of("2011-01-06"), ImmutableMap.of("a", "e", "rows", 6), + DateTimes.of("2011-01-07"), ImmutableMap.of("a", "f", "rows", 7), + DateTimes.of("2011-01-08"), ImmutableMap.of("a", "g", "rows", 8), + DateTimes.of("2011-01-09"), ImmutableMap.of("a", "h", "rows", 9) ), - new Interval("2011-01-05/2011-01-10"), - makeSelectResults(dimensions, metrics, new DateTime("2011-01-05T01"), ImmutableMap.of("a", "d", "rows", 5), - new DateTime("2011-01-06T01"), ImmutableMap.of("a", "e", "rows", 6), - new DateTime("2011-01-07T01"), ImmutableMap.of("a", "f", "rows", 7), - new DateTime("2011-01-08T01"), ImmutableMap.of("a", "g", "rows", 8), - new DateTime("2011-01-09T01"), ImmutableMap.of("a", "h", "rows", 9) + Intervals.of("2011-01-05/2011-01-10"), + makeSelectResults(dimensions, metrics, DateTimes.of("2011-01-05T01"), ImmutableMap.of("a", "d", "rows", 5), + DateTimes.of("2011-01-06T01"), ImmutableMap.of("a", "e", "rows", 6), + DateTimes.of("2011-01-07T01"), ImmutableMap.of("a", "f", "rows", 7), + DateTimes.of("2011-01-08T01"), ImmutableMap.of("a", "g", "rows", 8), + DateTimes.of("2011-01-09T01"), ImmutableMap.of("a", "h", "rows", 9) ) ); @@ -1331,18 +1333,18 @@ public class CachingClusteredClientTest ); HashMap context = new HashMap(); TestHelper.assertExpectedResults( - makeSelectResults(dimensions, metrics, new DateTime("2011-01-01"), ImmutableMap.of("a", "b", "rows", 1), - new DateTime("2011-01-02"), ImmutableMap.of("a", "c", "rows", 5), - new DateTime("2011-01-05"), ImmutableMap.of("a", "d", "rows", 5), - new DateTime("2011-01-05T01"), ImmutableMap.of("a", "d", "rows", 5), - new DateTime("2011-01-06"), ImmutableMap.of("a", "e", "rows", 6), - new DateTime("2011-01-06T01"), ImmutableMap.of("a", "e", "rows", 6), - new DateTime("2011-01-07"), ImmutableMap.of("a", "f", "rows", 7), - new DateTime("2011-01-07T01"), ImmutableMap.of("a", "f", "rows", 7), - new DateTime("2011-01-08"), ImmutableMap.of("a", "g", "rows", 8), - new DateTime("2011-01-08T01"), ImmutableMap.of("a", "g", "rows", 8), - new DateTime("2011-01-09"), ImmutableMap.of("a", "h", "rows", 9), - new DateTime("2011-01-09T01"), ImmutableMap.of("a", "h", "rows", 9) + makeSelectResults(dimensions, metrics, DateTimes.of("2011-01-01"), ImmutableMap.of("a", "b", "rows", 1), + DateTimes.of("2011-01-02"), ImmutableMap.of("a", "c", "rows", 5), + DateTimes.of("2011-01-05"), ImmutableMap.of("a", "d", "rows", 5), + DateTimes.of("2011-01-05T01"), ImmutableMap.of("a", "d", "rows", 5), + DateTimes.of("2011-01-06"), ImmutableMap.of("a", "e", "rows", 6), + DateTimes.of("2011-01-06T01"), ImmutableMap.of("a", "e", "rows", 6), + DateTimes.of("2011-01-07"), ImmutableMap.of("a", "f", "rows", 7), + DateTimes.of("2011-01-07T01"), ImmutableMap.of("a", "f", 
"rows", 7), + DateTimes.of("2011-01-08"), ImmutableMap.of("a", "g", "rows", 8), + DateTimes.of("2011-01-08T01"), ImmutableMap.of("a", "g", "rows", 8), + DateTimes.of("2011-01-09"), ImmutableMap.of("a", "h", "rows", 9), + DateTimes.of("2011-01-09T01"), ImmutableMap.of("a", "h", "rows", 9) ), runner.run(QueryPlus.wrap(builder.intervals("2011-01-01/2011-01-10").build()), context) ); @@ -1367,30 +1369,30 @@ public class CachingClusteredClientTest testQueryCaching( getDefaultQueryRunner(), builder.build(), - new Interval("2011-01-01/2011-01-02"), - makeSelectResults(dimensions, metrics, new DateTime("2011-01-01"), ImmutableMap.of("a", "b", "rows", 1)), + Intervals.of("2011-01-01/2011-01-02"), + makeSelectResults(dimensions, metrics, DateTimes.of("2011-01-01"), ImmutableMap.of("a", "b", "rows", 1)), - new Interval("2011-01-02/2011-01-03"), - makeSelectResults(dimensions, metrics, new DateTime("2011-01-02"), ImmutableMap.of("a", "c", "rows", 5)), + Intervals.of("2011-01-02/2011-01-03"), + makeSelectResults(dimensions, metrics, DateTimes.of("2011-01-02"), ImmutableMap.of("a", "c", "rows", 5)), - new Interval("2011-01-05/2011-01-10"), + Intervals.of("2011-01-05/2011-01-10"), makeSelectResults( dimensions, metrics, - new DateTime("2011-01-05"), ImmutableMap.of("a", "d", "rows", 5), - new DateTime("2011-01-06"), ImmutableMap.of("a", "e", "rows", 6), - new DateTime("2011-01-07"), ImmutableMap.of("a", "f", "rows", 7), - new DateTime("2011-01-08"), ImmutableMap.of("a", "g", "rows", 8), - new DateTime("2011-01-09"), ImmutableMap.of("a", "h", "rows", 9) + DateTimes.of("2011-01-05"), ImmutableMap.of("a", "d", "rows", 5), + DateTimes.of("2011-01-06"), ImmutableMap.of("a", "e", "rows", 6), + DateTimes.of("2011-01-07"), ImmutableMap.of("a", "f", "rows", 7), + DateTimes.of("2011-01-08"), ImmutableMap.of("a", "g", "rows", 8), + DateTimes.of("2011-01-09"), ImmutableMap.of("a", "h", "rows", 9) ), - new Interval("2011-01-05/2011-01-10"), + Intervals.of("2011-01-05/2011-01-10"), makeSelectResults( dimensions, metrics, - new DateTime("2011-01-05T01"), ImmutableMap.of("a", "d", "rows", 5), - new DateTime("2011-01-06T01"), ImmutableMap.of("a", "e", "rows", 6), - new DateTime("2011-01-07T01"), ImmutableMap.of("a", "f", "rows", 7), - new DateTime("2011-01-08T01"), ImmutableMap.of("a", "g", "rows", 8), - new DateTime("2011-01-09T01"), ImmutableMap.of("a", "h", "rows", 9) + DateTimes.of("2011-01-05T01"), ImmutableMap.of("a", "d", "rows", 5), + DateTimes.of("2011-01-06T01"), ImmutableMap.of("a", "e", "rows", 6), + DateTimes.of("2011-01-07T01"), ImmutableMap.of("a", "f", "rows", 7), + DateTimes.of("2011-01-08T01"), ImmutableMap.of("a", "g", "rows", 8), + DateTimes.of("2011-01-09T01"), ImmutableMap.of("a", "h", "rows", 9) ) ); @@ -1406,18 +1408,18 @@ public class CachingClusteredClientTest TestHelper.assertExpectedResults( makeSelectResults( dimensions, metrics, - new DateTime("2011-01-01"), ImmutableMap.of("a", "b", "rows", 1), - new DateTime("2011-01-02"), ImmutableMap.of("a", "c", "rows", 5), - new DateTime("2011-01-05"), ImmutableMap.of("a", "d", "rows", 5), - new DateTime("2011-01-05T01"), ImmutableMap.of("a", "d", "rows", 5), - new DateTime("2011-01-06"), ImmutableMap.of("a", "e", "rows", 6), - new DateTime("2011-01-06T01"), ImmutableMap.of("a", "e", "rows", 6), - new DateTime("2011-01-07"), ImmutableMap.of("a", "f", "rows", 7), - new DateTime("2011-01-07T01"), ImmutableMap.of("a", "f", "rows", 7), - new DateTime("2011-01-08"), ImmutableMap.of("a", "g", "rows", 8), - new DateTime("2011-01-08T01"), ImmutableMap.of("a", 
"g", "rows", 8), - new DateTime("2011-01-09"), ImmutableMap.of("a", "h", "rows", 9), - new DateTime("2011-01-09T01"), ImmutableMap.of("a", "h", "rows", 9) + DateTimes.of("2011-01-01"), ImmutableMap.of("a", "b", "rows", 1), + DateTimes.of("2011-01-02"), ImmutableMap.of("a", "c", "rows", 5), + DateTimes.of("2011-01-05"), ImmutableMap.of("a", "d", "rows", 5), + DateTimes.of("2011-01-05T01"), ImmutableMap.of("a", "d", "rows", 5), + DateTimes.of("2011-01-06"), ImmutableMap.of("a", "e", "rows", 6), + DateTimes.of("2011-01-06T01"), ImmutableMap.of("a", "e", "rows", 6), + DateTimes.of("2011-01-07"), ImmutableMap.of("a", "f", "rows", 7), + DateTimes.of("2011-01-07T01"), ImmutableMap.of("a", "f", "rows", 7), + DateTimes.of("2011-01-08"), ImmutableMap.of("a", "g", "rows", 8), + DateTimes.of("2011-01-08T01"), ImmutableMap.of("a", "g", "rows", 8), + DateTimes.of("2011-01-09"), ImmutableMap.of("a", "h", "rows", 9), + DateTimes.of("2011-01-09T01"), ImmutableMap.of("a", "h", "rows", 9) ), runner.run(QueryPlus.wrap(builder.intervals("2011-01-01/2011-01-10").build()), context) ); @@ -1429,18 +1431,18 @@ public class CachingClusteredClientTest TestHelper.assertExpectedResults( makeSelectResults( dimensions, metrics, - new DateTime("2011-01-01"), ImmutableMap.of("a2", "b", "rows", 1), - new DateTime("2011-01-02"), ImmutableMap.of("a2", "c", "rows", 5), - new DateTime("2011-01-05"), ImmutableMap.of("a2", "d", "rows", 5), - new DateTime("2011-01-05T01"), ImmutableMap.of("a2", "d", "rows", 5), - new DateTime("2011-01-06"), ImmutableMap.of("a2", "e", "rows", 6), - new DateTime("2011-01-06T01"), ImmutableMap.of("a2", "e", "rows", 6), - new DateTime("2011-01-07"), ImmutableMap.of("a2", "f", "rows", 7), - new DateTime("2011-01-07T01"), ImmutableMap.of("a2", "f", "rows", 7), - new DateTime("2011-01-08"), ImmutableMap.of("a2", "g", "rows", 8), - new DateTime("2011-01-08T01"), ImmutableMap.of("a2", "g", "rows", 8), - new DateTime("2011-01-09"), ImmutableMap.of("a2", "h", "rows", 9), - new DateTime("2011-01-09T01"), ImmutableMap.of("a2", "h", "rows", 9) + DateTimes.of("2011-01-01"), ImmutableMap.of("a2", "b", "rows", 1), + DateTimes.of("2011-01-02"), ImmutableMap.of("a2", "c", "rows", 5), + DateTimes.of("2011-01-05"), ImmutableMap.of("a2", "d", "rows", 5), + DateTimes.of("2011-01-05T01"), ImmutableMap.of("a2", "d", "rows", 5), + DateTimes.of("2011-01-06"), ImmutableMap.of("a2", "e", "rows", 6), + DateTimes.of("2011-01-06T01"), ImmutableMap.of("a2", "e", "rows", 6), + DateTimes.of("2011-01-07"), ImmutableMap.of("a2", "f", "rows", 7), + DateTimes.of("2011-01-07T01"), ImmutableMap.of("a2", "f", "rows", 7), + DateTimes.of("2011-01-08"), ImmutableMap.of("a2", "g", "rows", 8), + DateTimes.of("2011-01-08T01"), ImmutableMap.of("a2", "g", "rows", 8), + DateTimes.of("2011-01-09"), ImmutableMap.of("a2", "h", "rows", 9), + DateTimes.of("2011-01-09T01"), ImmutableMap.of("a2", "h", "rows", 9) ), runner.run(QueryPlus.wrap(query), context) ); @@ -1473,43 +1475,43 @@ public class CachingClusteredClientTest testQueryCaching( getDefaultQueryRunner(), builder.build(), - new Interval("2011-01-01/2011-01-02"), + Intervals.of("2011-01-01/2011-01-02"), makeGroupByResults( - new DateTime("2011-01-01"), + DateTimes.of("2011-01-01"), ImmutableMap.of("a", "a", "rows", 1, "imps", 1, "impers", 1, "uniques", collector) ), - new Interval("2011-01-02/2011-01-03"), + Intervals.of("2011-01-02/2011-01-03"), makeGroupByResults( - new DateTime("2011-01-02"), + DateTimes.of("2011-01-02"), ImmutableMap.of("a", "b", "rows", 2, "imps", 2, "impers", 2, 
"uniques", collector) ), - new Interval("2011-01-05/2011-01-10"), + Intervals.of("2011-01-05/2011-01-10"), makeGroupByResults( - new DateTime("2011-01-05"), + DateTimes.of("2011-01-05"), ImmutableMap.of("a", "c", "rows", 3, "imps", 3, "impers", 3, "uniques", collector), - new DateTime("2011-01-06"), + DateTimes.of("2011-01-06"), ImmutableMap.of("a", "d", "rows", 4, "imps", 4, "impers", 4, "uniques", collector), - new DateTime("2011-01-07"), + DateTimes.of("2011-01-07"), ImmutableMap.of("a", "e", "rows", 5, "imps", 5, "impers", 5, "uniques", collector), - new DateTime("2011-01-08"), + DateTimes.of("2011-01-08"), ImmutableMap.of("a", "f", "rows", 6, "imps", 6, "impers", 6, "uniques", collector), - new DateTime("2011-01-09"), + DateTimes.of("2011-01-09"), ImmutableMap.of("a", "g", "rows", 7, "imps", 7, "impers", 7, "uniques", collector) ), - new Interval("2011-01-05/2011-01-10"), + Intervals.of("2011-01-05/2011-01-10"), makeGroupByResults( - new DateTime("2011-01-05T01"), + DateTimes.of("2011-01-05T01"), ImmutableMap.of("a", "c", "rows", 3, "imps", 3, "impers", 3, "uniques", collector), - new DateTime("2011-01-06T01"), + DateTimes.of("2011-01-06T01"), ImmutableMap.of("a", "d", "rows", 4, "imps", 4, "impers", 4, "uniques", collector), - new DateTime("2011-01-07T01"), + DateTimes.of("2011-01-07T01"), ImmutableMap.of("a", "e", "rows", 5, "imps", 5, "impers", 5, "uniques", collector), - new DateTime("2011-01-08T01"), + DateTimes.of("2011-01-08T01"), ImmutableMap.of("a", "f", "rows", 6, "imps", 6, "impers", 6, "uniques", collector), - new DateTime("2011-01-09T01"), + DateTimes.of("2011-01-09T01"), ImmutableMap.of("a", "g", "rows", 7, "imps", 7, "impers", 7, "uniques", collector) ) ); @@ -1521,25 +1523,25 @@ public class CachingClusteredClientTest HashMap context = new HashMap(); TestHelper.assertExpectedObjects( makeGroupByResults( - new DateTime("2011-01-05T"), + DateTimes.of("2011-01-05T"), ImmutableMap.of("a", "c", "rows", 3, "imps", 3, "impers", 3, "uniques", collector), - new DateTime("2011-01-05T01"), + DateTimes.of("2011-01-05T01"), ImmutableMap.of("a", "c", "rows", 3, "imps", 3, "impers", 3, "uniques", collector), - new DateTime("2011-01-06T"), + DateTimes.of("2011-01-06T"), ImmutableMap.of("a", "d", "rows", 4, "imps", 4, "impers", 4, "uniques", collector), - new DateTime("2011-01-06T01"), + DateTimes.of("2011-01-06T01"), ImmutableMap.of("a", "d", "rows", 4, "imps", 4, "impers", 4, "uniques", collector), - new DateTime("2011-01-07T"), + DateTimes.of("2011-01-07T"), ImmutableMap.of("a", "e", "rows", 5, "imps", 5, "impers", 5, "uniques", collector), - new DateTime("2011-01-07T01"), + DateTimes.of("2011-01-07T01"), ImmutableMap.of("a", "e", "rows", 5, "imps", 5, "impers", 5, "uniques", collector), - new DateTime("2011-01-08T"), + DateTimes.of("2011-01-08T"), ImmutableMap.of("a", "f", "rows", 6, "imps", 6, "impers", 6, "uniques", collector), - new DateTime("2011-01-08T01"), + DateTimes.of("2011-01-08T01"), ImmutableMap.of("a", "f", "rows", 6, "imps", 6, "impers", 6, "uniques", collector), - new DateTime("2011-01-09T"), + DateTimes.of("2011-01-09T"), ImmutableMap.of("a", "g", "rows", 7, "imps", 7, "impers", 7, "uniques", collector), - new DateTime("2011-01-09T01"), + DateTimes.of("2011-01-09T01"), ImmutableMap.of("a", "g", "rows", 7, "imps", 7, "impers", 7, "uniques", collector) ), runner.run(QueryPlus.wrap(builder.setInterval("2011-01-05/2011-01-10").build()), context), @@ -1557,17 +1559,17 @@ public class CachingClusteredClientTest .intervals(CachingClusteredClientTest.SEG_SPEC) 
.context(CachingClusteredClientTest.CONTEXT) .build(), - new Interval("2011-01-01/2011-01-02"), - makeTimeBoundaryResult(new DateTime("2011-01-01"), new DateTime("2011-01-01"), new DateTime("2011-01-02")), + Intervals.of("2011-01-01/2011-01-02"), + makeTimeBoundaryResult(DateTimes.of("2011-01-01"), DateTimes.of("2011-01-01"), DateTimes.of("2011-01-02")), - new Interval("2011-01-01/2011-01-03"), - makeTimeBoundaryResult(new DateTime("2011-01-02"), new DateTime("2011-01-02"), new DateTime("2011-01-03")), + Intervals.of("2011-01-01/2011-01-03"), + makeTimeBoundaryResult(DateTimes.of("2011-01-02"), DateTimes.of("2011-01-02"), DateTimes.of("2011-01-03")), - new Interval("2011-01-01/2011-01-10"), - makeTimeBoundaryResult(new DateTime("2011-01-05"), new DateTime("2011-01-05"), new DateTime("2011-01-10")), + Intervals.of("2011-01-01/2011-01-10"), + makeTimeBoundaryResult(DateTimes.of("2011-01-05"), DateTimes.of("2011-01-05"), DateTimes.of("2011-01-10")), - new Interval("2011-01-01/2011-01-10"), - makeTimeBoundaryResult(new DateTime("2011-01-05T01"), new DateTime("2011-01-05T01"), new DateTime("2011-01-10")) + Intervals.of("2011-01-01/2011-01-10"), + makeTimeBoundaryResult(DateTimes.of("2011-01-05T01"), DateTimes.of("2011-01-05T01"), DateTimes.of("2011-01-10")) ); testQueryCaching( @@ -1578,17 +1580,17 @@ public class CachingClusteredClientTest .context(CachingClusteredClientTest.CONTEXT) .bound(TimeBoundaryQuery.MAX_TIME) .build(), - new Interval("2011-01-01/2011-01-02"), - makeTimeBoundaryResult(new DateTime("2011-01-01"), null, new DateTime("2011-01-02")), + Intervals.of("2011-01-01/2011-01-02"), + makeTimeBoundaryResult(DateTimes.of("2011-01-01"), null, DateTimes.of("2011-01-02")), - new Interval("2011-01-01/2011-01-03"), - makeTimeBoundaryResult(new DateTime("2011-01-02"), null, new DateTime("2011-01-03")), + Intervals.of("2011-01-01/2011-01-03"), + makeTimeBoundaryResult(DateTimes.of("2011-01-02"), null, DateTimes.of("2011-01-03")), - new Interval("2011-01-01/2011-01-10"), - makeTimeBoundaryResult(new DateTime("2011-01-05"), null, new DateTime("2011-01-10")), + Intervals.of("2011-01-01/2011-01-10"), + makeTimeBoundaryResult(DateTimes.of("2011-01-05"), null, DateTimes.of("2011-01-10")), - new Interval("2011-01-01/2011-01-10"), - makeTimeBoundaryResult(new DateTime("2011-01-05T01"), null, new DateTime("2011-01-10")) + Intervals.of("2011-01-01/2011-01-10"), + makeTimeBoundaryResult(DateTimes.of("2011-01-05T01"), null, DateTimes.of("2011-01-10")) ); testQueryCaching( @@ -1599,17 +1601,17 @@ public class CachingClusteredClientTest .context(CachingClusteredClientTest.CONTEXT) .bound(TimeBoundaryQuery.MIN_TIME) .build(), - new Interval("2011-01-01/2011-01-02"), - makeTimeBoundaryResult(new DateTime("2011-01-01"), new DateTime("2011-01-01"), null), + Intervals.of("2011-01-01/2011-01-02"), + makeTimeBoundaryResult(DateTimes.of("2011-01-01"), DateTimes.of("2011-01-01"), null), - new Interval("2011-01-01/2011-01-03"), - makeTimeBoundaryResult(new DateTime("2011-01-02"), new DateTime("2011-01-02"), null), + Intervals.of("2011-01-01/2011-01-03"), + makeTimeBoundaryResult(DateTimes.of("2011-01-02"), DateTimes.of("2011-01-02"), null), - new Interval("2011-01-01/2011-01-10"), - makeTimeBoundaryResult(new DateTime("2011-01-05"), new DateTime("2011-01-05"), null), + Intervals.of("2011-01-01/2011-01-10"), + makeTimeBoundaryResult(DateTimes.of("2011-01-05"), DateTimes.of("2011-01-05"), null), - new Interval("2011-01-01/2011-01-10"), - makeTimeBoundaryResult(new DateTime("2011-01-05T01"), new 
DateTime("2011-01-05T01"), null) + Intervals.of("2011-01-01/2011-01-10"), + makeTimeBoundaryResult(DateTimes.of("2011-01-05T01"), DateTimes.of("2011-01-05T01"), null) ); } @@ -1664,12 +1666,12 @@ public class CachingClusteredClientTest filter result in {[2,2]}, so segments [1,2] and [2,3] is needed */ List>> expectedResult = Arrays.asList( - makeTimeResults(new DateTime("2011-01-01"), 50, 5000, - new DateTime("2011-01-02"), 10, 1252, - new DateTime("2011-01-03"), 20, 6213, - new DateTime("2011-01-04"), 30, 743), - makeTimeResults(new DateTime("2011-01-07"), 60, 6020, - new DateTime("2011-01-08"), 70, 250) + makeTimeResults(DateTimes.of("2011-01-01"), 50, 5000, + DateTimes.of("2011-01-02"), 10, 1252, + DateTimes.of("2011-01-03"), 20, 6213, + DateTimes.of("2011-01-04"), 30, 743), + makeTimeResults(DateTimes.of("2011-01-07"), 60, 6020, + DateTimes.of("2011-01-08"), 70, 250) ); testQueryCachingWithFilter( @@ -1677,16 +1679,16 @@ public class CachingClusteredClientTest 3, builder.build(), expectedResult, - new Interval("2011-01-01/2011-01-05"), makeTimeResults(new DateTime("2011-01-01"), 50, 5000), - new Interval("2011-01-01/2011-01-05"), makeTimeResults(new DateTime("2011-01-02"), 10, 1252), - new Interval("2011-01-01/2011-01-05"), makeTimeResults(new DateTime("2011-01-03"), 20, 6213), - new Interval("2011-01-01/2011-01-05"), makeTimeResults(new DateTime("2011-01-04"), 30, 743), - new Interval("2011-01-01/2011-01-05"), makeTimeResults(new DateTime("2011-01-05"), 40, 6000), - new Interval("2011-01-06/2011-01-10"), makeTimeResults(new DateTime("2011-01-06"), 50, 425), - new Interval("2011-01-06/2011-01-10"), makeTimeResults(new DateTime("2011-01-07"), 60, 6020), - new Interval("2011-01-06/2011-01-10"), makeTimeResults(new DateTime("2011-01-08"), 70, 250), - new Interval("2011-01-06/2011-01-10"), makeTimeResults(new DateTime("2011-01-09"), 23, 85312), - new Interval("2011-01-06/2011-01-10"), makeTimeResults(new DateTime("2011-01-10"), 100, 512) + Intervals.of("2011-01-01/2011-01-05"), makeTimeResults(DateTimes.of("2011-01-01"), 50, 5000), + Intervals.of("2011-01-01/2011-01-05"), makeTimeResults(DateTimes.of("2011-01-02"), 10, 1252), + Intervals.of("2011-01-01/2011-01-05"), makeTimeResults(DateTimes.of("2011-01-03"), 20, 6213), + Intervals.of("2011-01-01/2011-01-05"), makeTimeResults(DateTimes.of("2011-01-04"), 30, 743), + Intervals.of("2011-01-01/2011-01-05"), makeTimeResults(DateTimes.of("2011-01-05"), 40, 6000), + Intervals.of("2011-01-06/2011-01-10"), makeTimeResults(DateTimes.of("2011-01-06"), 50, 425), + Intervals.of("2011-01-06/2011-01-10"), makeTimeResults(DateTimes.of("2011-01-07"), 60, 6020), + Intervals.of("2011-01-06/2011-01-10"), makeTimeResults(DateTimes.of("2011-01-08"), 70, 250), + Intervals.of("2011-01-06/2011-01-10"), makeTimeResults(DateTimes.of("2011-01-09"), 23, 85312), + Intervals.of("2011-01-06/2011-01-10"), makeTimeResults(DateTimes.of("2011-01-10"), 100, 512) ); } @@ -1733,9 +1735,9 @@ public class CachingClusteredClientTest TimeseriesQuery query = builder.build(); Map context = new HashMap<>(); - final Interval interval1 = new Interval("2011-01-06/2011-01-07"); - final Interval interval2 = new Interval("2011-01-07/2011-01-08"); - final Interval interval3 = new Interval("2011-01-08/2011-01-09"); + final Interval interval1 = Intervals.of("2011-01-06/2011-01-07"); + final Interval interval2 = Intervals.of("2011-01-07/2011-01-08"); + final Interval interval3 = Intervals.of("2011-01-08/2011-01-09"); QueryRunner runner = new FinalizeResultsQueryRunner( getDefaultQueryRunner(), 
new TimeseriesQueryQueryToolChest( @@ -2817,7 +2819,7 @@ public class CachingClusteredClientTest { super( "", - new Interval(0, 1), + Intervals.utc(0, 1), "", null, null, @@ -2983,17 +2985,17 @@ public class CachingClusteredClientTest .intervals(CachingClusteredClientTest.SEG_SPEC) .context(CachingClusteredClientTest.CONTEXT) .build(), - new Interval("1970-01-01/1970-01-02"), - makeTimeBoundaryResult(new DateTime("1970-01-01"), new DateTime("1970-01-01"), new DateTime("1970-01-02")), + Intervals.of("1970-01-01/1970-01-02"), + makeTimeBoundaryResult(DateTimes.of("1970-01-01"), DateTimes.of("1970-01-01"), DateTimes.of("1970-01-02")), - new Interval("1970-01-01/2011-01-03"), - makeTimeBoundaryResult(new DateTime("1970-01-02"), new DateTime("1970-01-02"), new DateTime("1970-01-03")), + Intervals.of("1970-01-01/2011-01-03"), + makeTimeBoundaryResult(DateTimes.of("1970-01-02"), DateTimes.of("1970-01-02"), DateTimes.of("1970-01-03")), - new Interval("1970-01-01/2011-01-10"), - makeTimeBoundaryResult(new DateTime("1970-01-05"), new DateTime("1970-01-05"), new DateTime("1970-01-10")), + Intervals.of("1970-01-01/2011-01-10"), + makeTimeBoundaryResult(DateTimes.of("1970-01-05"), DateTimes.of("1970-01-05"), DateTimes.of("1970-01-10")), - new Interval("1970-01-01/2011-01-10"), - makeTimeBoundaryResult(new DateTime("1970-01-05T01"), new DateTime("1970-01-05T01"), new DateTime("1970-01-10")) + Intervals.of("1970-01-01/2011-01-10"), + makeTimeBoundaryResult(DateTimes.of("1970-01-05T01"), DateTimes.of("1970-01-05T01"), DateTimes.of("1970-01-10")) ); testQueryCaching( @@ -3004,17 +3006,17 @@ public class CachingClusteredClientTest .context(CachingClusteredClientTest.CONTEXT) .bound(TimeBoundaryQuery.MAX_TIME) .build(), - new Interval("1970-01-01/2011-01-02"), - makeTimeBoundaryResult(new DateTime("1970-01-01"), null, new DateTime("1970-01-02")), + Intervals.of("1970-01-01/2011-01-02"), + makeTimeBoundaryResult(DateTimes.of("1970-01-01"), null, DateTimes.of("1970-01-02")), - new Interval("1970-01-01/2011-01-03"), - makeTimeBoundaryResult(new DateTime("1970-01-02"), null, new DateTime("1970-01-03")), + Intervals.of("1970-01-01/2011-01-03"), + makeTimeBoundaryResult(DateTimes.of("1970-01-02"), null, DateTimes.of("1970-01-03")), - new Interval("1970-01-01/2011-01-10"), - makeTimeBoundaryResult(new DateTime("1970-01-05"), null, new DateTime("1970-01-10")), + Intervals.of("1970-01-01/2011-01-10"), + makeTimeBoundaryResult(DateTimes.of("1970-01-05"), null, DateTimes.of("1970-01-10")), - new Interval("1970-01-01/2011-01-10"), - makeTimeBoundaryResult(new DateTime("1970-01-05T01"), null, new DateTime("1970-01-10")) + Intervals.of("1970-01-01/2011-01-10"), + makeTimeBoundaryResult(DateTimes.of("1970-01-05T01"), null, DateTimes.of("1970-01-10")) ); testQueryCaching( @@ -3025,17 +3027,17 @@ public class CachingClusteredClientTest .context(CachingClusteredClientTest.CONTEXT) .bound(TimeBoundaryQuery.MIN_TIME) .build(), - new Interval("1970-01-01/2011-01-02"), - makeTimeBoundaryResult(new DateTime("1970-01-01"), new DateTime("1970-01-01"), null), + Intervals.of("1970-01-01/2011-01-02"), + makeTimeBoundaryResult(DateTimes.of("1970-01-01"), DateTimes.of("1970-01-01"), null), - new Interval("1970-01-01/2011-01-03"), - makeTimeBoundaryResult(new DateTime("1970-01-02"), new DateTime("1970-01-02"), null), + Intervals.of("1970-01-01/2011-01-03"), + makeTimeBoundaryResult(DateTimes.of("1970-01-02"), DateTimes.of("1970-01-02"), null), - new Interval("1970-01-01/1970-01-10"), - makeTimeBoundaryResult(new DateTime("1970-01-05"), 
new DateTime("1970-01-05"), null), + Intervals.of("1970-01-01/1970-01-10"), + makeTimeBoundaryResult(DateTimes.of("1970-01-05"), DateTimes.of("1970-01-05"), null), - new Interval("1970-01-01/2011-01-10"), - makeTimeBoundaryResult(new DateTime("1970-01-05T01"), new DateTime("1970-01-05T01"), null) + Intervals.of("1970-01-01/2011-01-10"), + makeTimeBoundaryResult(DateTimes.of("1970-01-05T01"), DateTimes.of("1970-01-05T01"), null) ); } @@ -3054,34 +3056,34 @@ public class CachingClusteredClientTest testQueryCaching( getDefaultQueryRunner(), builder.build(), - new Interval("2011-01-01/2011-01-02"), + Intervals.of("2011-01-01/2011-01-02"), makeGroupByResults( - new DateTime("2011-01-01"), + DateTimes.of("2011-01-01"), ImmutableMap.of("output", "a", "rows", 1, "imps", 1, "impers", 1) ), - new Interval("2011-01-02/2011-01-03"), + Intervals.of("2011-01-02/2011-01-03"), makeGroupByResults( - new DateTime("2011-01-02"), + DateTimes.of("2011-01-02"), ImmutableMap.of("output", "b", "rows", 2, "imps", 2, "impers", 2) ), - new Interval("2011-01-05/2011-01-10"), + Intervals.of("2011-01-05/2011-01-10"), makeGroupByResults( - new DateTime("2011-01-05"), ImmutableMap.of("output", "c", "rows", 3, "imps", 3, "impers", 3), - new DateTime("2011-01-06"), ImmutableMap.of("output", "d", "rows", 4, "imps", 4, "impers", 4), - new DateTime("2011-01-07"), ImmutableMap.of("output", "e", "rows", 5, "imps", 5, "impers", 5), - new DateTime("2011-01-08"), ImmutableMap.of("output", "f", "rows", 6, "imps", 6, "impers", 6), - new DateTime("2011-01-09"), ImmutableMap.of("output", "g", "rows", 7, "imps", 7, "impers", 7) + DateTimes.of("2011-01-05"), ImmutableMap.of("output", "c", "rows", 3, "imps", 3, "impers", 3), + DateTimes.of("2011-01-06"), ImmutableMap.of("output", "d", "rows", 4, "imps", 4, "impers", 4), + DateTimes.of("2011-01-07"), ImmutableMap.of("output", "e", "rows", 5, "imps", 5, "impers", 5), + DateTimes.of("2011-01-08"), ImmutableMap.of("output", "f", "rows", 6, "imps", 6, "impers", 6), + DateTimes.of("2011-01-09"), ImmutableMap.of("output", "g", "rows", 7, "imps", 7, "impers", 7) ), - new Interval("2011-01-05/2011-01-10"), + Intervals.of("2011-01-05/2011-01-10"), makeGroupByResults( - new DateTime("2011-01-05T01"), ImmutableMap.of("output", "c", "rows", 3, "imps", 3, "impers", 3), - new DateTime("2011-01-06T01"), ImmutableMap.of("output", "d", "rows", 4, "imps", 4, "impers", 4), - new DateTime("2011-01-07T01"), ImmutableMap.of("output", "e", "rows", 5, "imps", 5, "impers", 5), - new DateTime("2011-01-08T01"), ImmutableMap.of("output", "f", "rows", 6, "imps", 6, "impers", 6), - new DateTime("2011-01-09T01"), ImmutableMap.of("output", "g", "rows", 7, "imps", 7, "impers", 7) + DateTimes.of("2011-01-05T01"), ImmutableMap.of("output", "c", "rows", 3, "imps", 3, "impers", 3), + DateTimes.of("2011-01-06T01"), ImmutableMap.of("output", "d", "rows", 4, "imps", 4, "impers", 4), + DateTimes.of("2011-01-07T01"), ImmutableMap.of("output", "e", "rows", 5, "imps", 5, "impers", 5), + DateTimes.of("2011-01-08T01"), ImmutableMap.of("output", "f", "rows", 6, "imps", 6, "impers", 6), + DateTimes.of("2011-01-09T01"), ImmutableMap.of("output", "g", "rows", 7, "imps", 7, "impers", 7) ) ); @@ -3092,16 +3094,16 @@ public class CachingClusteredClientTest HashMap context = new HashMap(); TestHelper.assertExpectedObjects( makeGroupByResults( - new DateTime("2011-01-05T"), ImmutableMap.of("output", "c", "rows", 3, "imps", 3, "impers", 3), - new DateTime("2011-01-05T01"), ImmutableMap.of("output", "c", "rows", 3, "imps", 3, "impers", 3), - 
new DateTime("2011-01-06T"), ImmutableMap.of("output", "d", "rows", 4, "imps", 4, "impers", 4), - new DateTime("2011-01-06T01"), ImmutableMap.of("output", "d", "rows", 4, "imps", 4, "impers", 4), - new DateTime("2011-01-07T"), ImmutableMap.of("output", "e", "rows", 5, "imps", 5, "impers", 5), - new DateTime("2011-01-07T01"), ImmutableMap.of("output", "e", "rows", 5, "imps", 5, "impers", 5), - new DateTime("2011-01-08T"), ImmutableMap.of("output", "f", "rows", 6, "imps", 6, "impers", 6), - new DateTime("2011-01-08T01"), ImmutableMap.of("output", "f", "rows", 6, "imps", 6, "impers", 6), - new DateTime("2011-01-09T"), ImmutableMap.of("output", "g", "rows", 7, "imps", 7, "impers", 7), - new DateTime("2011-01-09T01"), ImmutableMap.of("output", "g", "rows", 7, "imps", 7, "impers", 7) + DateTimes.of("2011-01-05T"), ImmutableMap.of("output", "c", "rows", 3, "imps", 3, "impers", 3), + DateTimes.of("2011-01-05T01"), ImmutableMap.of("output", "c", "rows", 3, "imps", 3, "impers", 3), + DateTimes.of("2011-01-06T"), ImmutableMap.of("output", "d", "rows", 4, "imps", 4, "impers", 4), + DateTimes.of("2011-01-06T01"), ImmutableMap.of("output", "d", "rows", 4, "imps", 4, "impers", 4), + DateTimes.of("2011-01-07T"), ImmutableMap.of("output", "e", "rows", 5, "imps", 5, "impers", 5), + DateTimes.of("2011-01-07T01"), ImmutableMap.of("output", "e", "rows", 5, "imps", 5, "impers", 5), + DateTimes.of("2011-01-08T"), ImmutableMap.of("output", "f", "rows", 6, "imps", 6, "impers", 6), + DateTimes.of("2011-01-08T01"), ImmutableMap.of("output", "f", "rows", 6, "imps", 6, "impers", 6), + DateTimes.of("2011-01-09T"), ImmutableMap.of("output", "g", "rows", 7, "imps", 7, "impers", 7), + DateTimes.of("2011-01-09T01"), ImmutableMap.of("output", "g", "rows", 7, "imps", 7, "impers", 7) ), runner.run(QueryPlus.wrap(builder.setInterval("2011-01-05/2011-01-10").build()), context), "" @@ -3114,16 +3116,16 @@ public class CachingClusteredClientTest .build(); TestHelper.assertExpectedObjects( makeGroupByResults( - new DateTime("2011-01-05T"), ImmutableMap.of("output2", "c", "rows", 3, "imps", 3, "impers2", 3), - new DateTime("2011-01-05T01"), ImmutableMap.of("output2", "c", "rows", 3, "imps", 3, "impers2", 3), - new DateTime("2011-01-06T"), ImmutableMap.of("output2", "d", "rows", 4, "imps", 4, "impers2", 4), - new DateTime("2011-01-06T01"), ImmutableMap.of("output2", "d", "rows", 4, "imps", 4, "impers2", 4), - new DateTime("2011-01-07T"), ImmutableMap.of("output2", "e", "rows", 5, "imps", 5, "impers2", 5), - new DateTime("2011-01-07T01"), ImmutableMap.of("output2", "e", "rows", 5, "imps", 5, "impers2", 5), - new DateTime("2011-01-08T"), ImmutableMap.of("output2", "f", "rows", 6, "imps", 6, "impers2", 6), - new DateTime("2011-01-08T01"), ImmutableMap.of("output2", "f", "rows", 6, "imps", 6, "impers2", 6), - new DateTime("2011-01-09T"), ImmutableMap.of("output2", "g", "rows", 7, "imps", 7, "impers2", 7), - new DateTime("2011-01-09T01"), ImmutableMap.of("output2", "g", "rows", 7, "imps", 7, "impers2", 7) + DateTimes.of("2011-01-05T"), ImmutableMap.of("output2", "c", "rows", 3, "imps", 3, "impers2", 3), + DateTimes.of("2011-01-05T01"), ImmutableMap.of("output2", "c", "rows", 3, "imps", 3, "impers2", 3), + DateTimes.of("2011-01-06T"), ImmutableMap.of("output2", "d", "rows", 4, "imps", 4, "impers2", 4), + DateTimes.of("2011-01-06T01"), ImmutableMap.of("output2", "d", "rows", 4, "imps", 4, "impers2", 4), + DateTimes.of("2011-01-07T"), ImmutableMap.of("output2", "e", "rows", 5, "imps", 5, "impers2", 5), + DateTimes.of("2011-01-07T01"), 
ImmutableMap.of("output2", "e", "rows", 5, "imps", 5, "impers2", 5), + DateTimes.of("2011-01-08T"), ImmutableMap.of("output2", "f", "rows", 6, "imps", 6, "impers2", 6), + DateTimes.of("2011-01-08T01"), ImmutableMap.of("output2", "f", "rows", 6, "imps", 6, "impers2", 6), + DateTimes.of("2011-01-09T"), ImmutableMap.of("output2", "g", "rows", 7, "imps", 7, "impers2", 7), + DateTimes.of("2011-01-09T01"), ImmutableMap.of("output2", "g", "rows", 7, "imps", 7, "impers2", 7) ), runner.run(QueryPlus.wrap(query), context), "renamed aggregators test" @@ -3133,7 +3135,7 @@ public class CachingClusteredClientTest @Test public void testIfNoneMatch() throws Exception { - Interval interval = new Interval("2016/2017"); + Interval interval = Intervals.of("2016/2017"); final DataSegment dataSegment = new DataSegment( "dataSource", interval, diff --git a/server/src/test/java/io/druid/client/CachingQueryRunnerTest.java b/server/src/test/java/io/druid/client/CachingQueryRunnerTest.java index 3ee8c9ec37a..37c9734c528 100644 --- a/server/src/test/java/io/druid/client/CachingQueryRunnerTest.java +++ b/server/src/test/java/io/druid/client/CachingQueryRunnerTest.java @@ -31,7 +31,9 @@ import io.druid.client.cache.CacheConfig; import io.druid.client.cache.CacheStats; import io.druid.client.cache.MapCache; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.SequenceWrapper; @@ -56,7 +58,6 @@ import io.druid.query.topn.TopNQueryConfig; import io.druid.query.topn.TopNQueryQueryToolChest; import io.druid.query.topn.TopNResultValue; import org.joda.time.DateTime; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; @@ -92,11 +93,11 @@ public class CachingQueryRunnerTest ); private static final Object[] objects = new Object[]{ - new DateTime("2011-01-05"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992, - new DateTime("2011-01-06"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, - new DateTime("2011-01-07"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, - new DateTime("2011-01-08"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986, - new DateTime("2011-01-09"), "a", 50, 4985, "b", 50, 4984, "c", 50, 4983 + DateTimes.of("2011-01-05"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992, + DateTimes.of("2011-01-06"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, + DateTimes.of("2011-01-07"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, + DateTimes.of("2011-01-08"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986, + DateTimes.of("2011-01-09"), "a", 50, 4985, "b", 50, 4984, "c", 50, 4983 }; private ExecutorService backgroundExecutorService; @@ -156,13 +157,13 @@ public class CachingQueryRunnerTest .build(); Result row1 = new Result( - new DateTime("2011-04-01"), + DateTimes.of("2011-04-01"), new TimeseriesResultValue( ImmutableMap.of("rows", 13L, "idx", 6619L, "uniques", QueryRunnerTestHelper.UNIQUES_9) ) ); Result row2 = new Result<>( - new DateTime("2011-04-02"), + DateTimes.of("2011-04-02"), new TimeseriesResultValue( ImmutableMap.of("rows", 13L, "idx", 5827L, "uniques", QueryRunnerTestHelper.UNIQUES_9) ) @@ -258,7 +259,7 @@ public class CachingQueryRunnerTest }; String segmentIdentifier = "segment"; - SegmentDescriptor segmentDescriptor = new SegmentDescriptor(new Interval("2011/2012"), "version", 0); + SegmentDescriptor 
segmentDescriptor = new SegmentDescriptor(Intervals.of("2011/2012"), "version", 0); DefaultObjectMapper objectMapper = new DefaultObjectMapper(); CachingQueryRunner runner = new CachingQueryRunner( @@ -336,7 +337,7 @@ public class CachingQueryRunnerTest { DefaultObjectMapper objectMapper = new DefaultObjectMapper(); String segmentIdentifier = "segment"; - SegmentDescriptor segmentDescriptor = new SegmentDescriptor(new Interval("2011/2012"), "version", 0); + SegmentDescriptor segmentDescriptor = new SegmentDescriptor(Intervals.of("2011/2012"), "version", 0); CacheStrategy cacheStrategy = toolchest.getCacheStrategy(query); Cache.NamedKey cacheKey = CacheUtil.computeSegmentCacheKey( diff --git a/server/src/test/java/io/druid/client/CoordinatorServerViewTest.java b/server/src/test/java/io/druid/client/CoordinatorServerViewTest.java index 49dc29b71eb..8aa3fc9c556 100644 --- a/server/src/test/java/io/druid/client/CoordinatorServerViewTest.java +++ b/server/src/test/java/io/druid/client/CoordinatorServerViewTest.java @@ -26,9 +26,9 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; - import io.druid.curator.CuratorTestBase; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.Pair; import io.druid.query.TableDataSource; import io.druid.server.coordination.DruidServerMetadata; @@ -107,14 +107,12 @@ public class CoordinatorServerViewTest extends CuratorTestBase TimelineLookup timeline = overlordServerView.getTimeline(new TableDataSource("test_overlord_server_view")); List serverLookupRes = (List) timeline.lookup( - new Interval( - "2014-10-20T00:00:00Z/P1D" - ) + Intervals.of("2014-10-20T00:00:00Z/P1D") ); Assert.assertEquals(1, serverLookupRes.size()); TimelineObjectHolder actualTimelineObjectHolder = serverLookupRes.get(0); - Assert.assertEquals(new Interval("2014-10-20T00:00:00Z/P1D"), actualTimelineObjectHolder.getInterval()); + Assert.assertEquals(Intervals.of("2014-10-20T00:00:00Z/P1D"), actualTimelineObjectHolder.getInterval()); Assert.assertEquals("v1", actualTimelineObjectHolder.getVersion()); PartitionHolder actualPartitionHolder = actualTimelineObjectHolder.getObject(); @@ -133,9 +131,9 @@ public class CoordinatorServerViewTest extends CuratorTestBase Assert.assertEquals( 0, - ((List) timeline.lookup(new Interval("2014-10-20T00:00:00Z/P1D"))).size() + ((List) timeline.lookup(Intervals.of("2014-10-20T00:00:00Z/P1D"))).size() ); - Assert.assertNull(timeline.findEntry(new Interval("2014-10-20T00:00:00Z/P1D"), "v1")); + Assert.assertNull(timeline.findEntry(Intervals.of("2014-10-20T00:00:00Z/P1D"), "v1")); } @Test @@ -204,7 +202,7 @@ public class CoordinatorServerViewTest extends CuratorTestBase createExpected("2011-04-06/2011-04-09", "v3", druidServers.get(3), segments.get(3)) ), (List) timeline.lookup( - new Interval( + Intervals.of( "2011-04-01/2011-04-09" ) ) @@ -225,11 +223,7 @@ public class CoordinatorServerViewTest extends CuratorTestBase createExpected("2011-04-03/2011-04-06", "v1", druidServers.get(1), segments.get(1)), createExpected("2011-04-06/2011-04-09", "v3", druidServers.get(3), segments.get(3)) ), - (List) timeline.lookup( - new Interval( - "2011-04-01/2011-04-09" - ) - ) + (List) timeline.lookup(Intervals.of("2011-04-01/2011-04-09")) ); // unannounce all the segments @@ -243,7 +237,7 @@ public class CoordinatorServerViewTest extends CuratorTestBase Assert.assertEquals( 0, - 
((List) timeline.lookup(new Interval("2011-04-01/2011-04-09"))).size() + ((List) timeline.lookup(Intervals.of("2011-04-01/2011-04-09"))).size() ); } @@ -264,7 +258,7 @@ public class CoordinatorServerViewTest extends CuratorTestBase DataSegment segment ) { - return Pair.of(new Interval(intervalStr), Pair.of(version, Pair.of(druidServer, segment))); + return Pair.of(Intervals.of(intervalStr), Pair.of(version, Pair.of(druidServer, segment))); } private void assertValues( @@ -345,7 +339,7 @@ public class CoordinatorServerViewTest extends CuratorTestBase { return DataSegment.builder() .dataSource("test_overlord_server_view") - .interval(new Interval(intervalStr)) + .interval(Intervals.of(intervalStr)) .loadSpec( ImmutableMap.of( "type", diff --git a/server/src/test/java/io/druid/client/DataSegmentTest.java b/server/src/test/java/io/druid/client/DataSegmentTest.java index 74030bdeb8b..76a32fd6bfb 100644 --- a/server/src/test/java/io/druid/client/DataSegmentTest.java +++ b/server/src/test/java/io/druid/client/DataSegmentTest.java @@ -26,11 +26,12 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.google.common.collect.Sets; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.segment.IndexIO; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; import io.druid.timeline.partition.SingleDimensionShardSpec; -import org.joda.time.DateTime; import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; @@ -51,7 +52,7 @@ public class DataSegmentTest public void testV1Serialization() throws Exception { - final Interval interval = new Interval("2011-10-01/2011-10-02"); + final Interval interval = Intervals.of("2011-10-01/2011-10-02"); final ImmutableMap loadSpec = ImmutableMap.of("something", "or_other"); DataSegment segment = new DataSegment( @@ -106,8 +107,8 @@ public class DataSegmentTest { final DataSegment segment = DataSegment.builder() .dataSource("foo") - .interval(new Interval("2012-01-01/2012-01-02")) - .version(new DateTime("2012-01-01T11:22:33.444Z").toString()) + .interval(Intervals.of("2012-01-01/2012-01-02")) + .version(DateTimes.of("2012-01-01T11:22:33.444Z").toString()) .shardSpec(NoneShardSpec.instance()) .build(); @@ -122,8 +123,8 @@ public class DataSegmentTest { final DataSegment segment = DataSegment.builder() .dataSource("foo") - .interval(new Interval("2012-01-01/2012-01-02")) - .version(new DateTime("2012-01-01T11:22:33.444Z").toString()) + .interval(Intervals.of("2012-01-01/2012-01-02")) + .version(DateTimes.of("2012-01-01T11:22:33.444Z").toString()) .shardSpec(new SingleDimensionShardSpec("bar", null, "abc", 0)) .build(); @@ -138,8 +139,8 @@ public class DataSegmentTest { final DataSegment segment = DataSegment.builder() .dataSource("foo") - .interval(new Interval("2012-01-01/2012-01-02")) - .version(new DateTime("2012-01-01T11:22:33.444Z").toString()) + .interval(Intervals.of("2012-01-01/2012-01-02")) + .version(DateTimes.of("2012-01-01T11:22:33.444Z").toString()) .shardSpec(new SingleDimensionShardSpec("bar", "abc", "def", 1)) .build(); @@ -154,8 +155,8 @@ public class DataSegmentTest { final DataSegment segment = DataSegment.builder() .dataSource("foo") - .interval(new Interval("2012-01-01/2012-01-02")) - .version(new DateTime("2012-01-01T11:22:33.444Z").toString()) + .interval(Intervals.of("2012-01-01/2012-01-02")) + .version(DateTimes.of("2012-01-01T11:22:33.444Z").toString()) 
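// The hunks above all apply the same substitution from this patch: Joda-Time constructors such as
// new DateTime("2013-01-01") and new Interval("2012/2013"), which resolve the string against the
// JVM's default time zone, are replaced with Druid's DateTimes.of() and Intervals.of(), which pin
// the UTC ISO chronology. A minimal sketch of the assumed behaviour of those helpers follows; the
// class and method names match the imports added by this patch, but the bodies are an illustration,
// not the patch's actual implementation.

import org.joda.time.DateTime;
import org.joda.time.Interval;
import org.joda.time.chrono.ISOChronology;

final class DateTimes
{
  private DateTimes() {}

  // Parses an ISO-8601 timestamp with the UTC chronology, so the result does not depend on the
  // time zone of the machine running the tests.
  static DateTime of(String instant)
  {
    return new DateTime(instant, ISOChronology.getInstanceUTC());
  }
}

final class Intervals
{
  private Intervals() {}

  // Parses a "start/end" ISO-8601 interval string in UTC for the same reason.
  static Interval of(String interval)
  {
    return new Interval(interval, ISOChronology.getInstanceUTC());
  }
}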
.build(); final DataSegment segment2 = mapper.readValue(mapper.writeValueAsString(segment), DataSegment.class); @@ -196,7 +197,7 @@ public class DataSegmentTest { return DataSegment.builder() .dataSource(dataSource) - .interval(new Interval(interval)) + .interval(Intervals.of(interval)) .version(version) .size(1) .build(); diff --git a/server/src/test/java/io/druid/client/DirectDruidClientTest.java b/server/src/test/java/io/druid/client/DirectDruidClientTest.java index 425b96fbd7e..ad59b648558 100644 --- a/server/src/test/java/io/druid/client/DirectDruidClientTest.java +++ b/server/src/test/java/io/druid/client/DirectDruidClientTest.java @@ -34,6 +34,8 @@ import io.druid.client.selector.HighestPriorityTierSelectorStrategy; import io.druid.client.selector.QueryableDruidServer; import io.druid.client.selector.ServerSelector; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; @@ -53,9 +55,7 @@ import org.easymock.EasyMock; import org.jboss.netty.handler.codec.http.HttpMethod; import org.jboss.netty.handler.codec.http.HttpResponseStatus; import org.jboss.netty.handler.timeout.ReadTimeoutException; -import org.joda.time.DateTime; import org.joda.time.Duration; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; @@ -122,8 +122,8 @@ public class DirectDruidClientTest final ServerSelector serverSelector = new ServerSelector( new DataSegment( "test", - new Interval("2013-01-01/2013-01-02"), - new DateTime("2013-01-01").toString(), + Intervals.of("2013-01-01/2013-01-02"), + DateTimes.of("2013-01-01").toString(), Maps.newHashMap(), Lists.newArrayList(), Lists.newArrayList(), @@ -193,7 +193,7 @@ public class DirectDruidClientTest ); List results = Sequences.toList(s1, Lists.newArrayList()); Assert.assertEquals(1, results.size()); - Assert.assertEquals(new DateTime("2014-01-01T01:02:03Z"), results.get(0).getTimestamp()); + Assert.assertEquals(DateTimes.of("2014-01-01T01:02:03Z"), results.get(0).getTimestamp()); Assert.assertEquals(3, client1.getNumOpenConnections()); client2.run(QueryPlus.wrap(query), defaultContext); @@ -240,8 +240,8 @@ public class DirectDruidClientTest final ServerSelector serverSelector = new ServerSelector( new DataSegment( "test", - new Interval("2013-01-01/2013-01-02"), - new DateTime("2013-01-01").toString(), + Intervals.of("2013-01-01/2013-01-02"), + DateTimes.of("2013-01-01").toString(), Maps.newHashMap(), Lists.newArrayList(), Lists.newArrayList(), @@ -308,8 +308,8 @@ public class DirectDruidClientTest DataSegment dataSegment = new DataSegment( "test", - new Interval("2013-01-01/2013-01-02"), - new DateTime("2013-01-01").toString(), + Intervals.of("2013-01-01/2013-01-02"), + DateTimes.of("2013-01-01").toString(), Maps.newHashMap(), Lists.newArrayList(), Lists.newArrayList(), diff --git a/server/src/test/java/io/druid/client/client/BatchServerInventoryViewTest.java b/server/src/test/java/io/druid/client/client/BatchServerInventoryViewTest.java index 3edd25b372a..7e6fad72b60 100644 --- a/server/src/test/java/io/druid/client/client/BatchServerInventoryViewTest.java +++ b/server/src/test/java/io/druid/client/client/BatchServerInventoryViewTest.java @@ -36,6 +36,7 @@ import io.druid.client.ServerView; import io.druid.curator.PotentiallyGzippedCompressionProvider; import io.druid.curator.announcement.Announcer; import 
io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.ISE; import io.druid.java.util.common.Pair; import io.druid.java.util.common.guava.Comparators; @@ -80,7 +81,7 @@ import java.util.concurrent.atomic.AtomicInteger; public class BatchServerInventoryViewTest { private static final String testBasePath = "/test"; - public static final DateTime SEGMENT_INTERVAL_START = new DateTime("2013-01-01"); + public static final DateTime SEGMENT_INTERVAL_START = DateTimes.of("2013-01-01"); public static final int INITIAL_SEGMENTS = 100; private static final Timing timing = new Timing(); @@ -382,7 +383,7 @@ public class BatchServerInventoryViewTest SEGMENT_INTERVAL_START.plusDays(offset + 1) ) ) - .version(new DateTime().toString()) + .version(DateTimes.nowUtc().toString()) .build(); } diff --git a/server/src/test/java/io/druid/client/client/ImmutableSegmentLoadInfoTest.java b/server/src/test/java/io/druid/client/client/ImmutableSegmentLoadInfoTest.java index 50cf50df6ca..284c456d5f1 100644 --- a/server/src/test/java/io/druid/client/client/ImmutableSegmentLoadInfoTest.java +++ b/server/src/test/java/io/druid/client/client/ImmutableSegmentLoadInfoTest.java @@ -23,12 +23,12 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.Sets; import io.druid.client.ImmutableSegmentLoadInfo; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Intervals; import io.druid.server.coordination.DruidServerMetadata; import io.druid.server.coordination.ServerType; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; import junit.framework.Assert; -import org.joda.time.Interval; import org.junit.Test; import java.io.IOException; @@ -43,9 +43,7 @@ public class ImmutableSegmentLoadInfoTest ImmutableSegmentLoadInfo segmentLoadInfo = new ImmutableSegmentLoadInfo( new DataSegment( "test_ds", - new Interval( - "2011-04-01/2011-04-02" - ), + Intervals.of("2011-04-01/2011-04-02"), "v1", null, null, diff --git a/server/src/test/java/io/druid/client/indexing/ClientAppendQueryTest.java b/server/src/test/java/io/druid/client/indexing/ClientAppendQueryTest.java index 3febded43d6..ca23ce74fec 100644 --- a/server/src/test/java/io/druid/client/indexing/ClientAppendQueryTest.java +++ b/server/src/test/java/io/druid/client/indexing/ClientAppendQueryTest.java @@ -20,6 +20,7 @@ package io.druid.client.indexing; import com.google.common.collect.Lists; +import io.druid.java.util.common.DateTimes; import io.druid.timeline.DataSegment; import org.joda.time.DateTime; import org.joda.time.Interval; @@ -33,9 +34,11 @@ public class ClientAppendQueryTest { private ClientAppendQuery clientAppendQuery; private static final String DATA_SOURCE = "data_source"; + private final DateTime start = DateTimes.nowUtc(); private List segments = Lists.newArrayList( - new DataSegment(DATA_SOURCE, new Interval(new DateTime(), new DateTime().plus(1)), new DateTime().toString(), null, - null, null, null, 0, 0)); + new DataSegment(DATA_SOURCE, new Interval(start, start.plus(1)), start.toString(), null, + null, null, null, 0, 0)); + @Before public void setUp() { diff --git a/server/src/test/java/io/druid/client/indexing/ClientConversionQueryTest.java b/server/src/test/java/io/druid/client/indexing/ClientConversionQueryTest.java index bb0a22139e5..db29d181994 100644 --- a/server/src/test/java/io/druid/client/indexing/ClientConversionQueryTest.java +++ 
b/server/src/test/java/io/druid/client/indexing/ClientConversionQueryTest.java @@ -19,6 +19,7 @@ package io.druid.client.indexing; +import io.druid.java.util.common.DateTimes; import io.druid.timeline.DataSegment; import org.joda.time.DateTime; import org.joda.time.Interval; @@ -29,9 +30,10 @@ public class ClientConversionQueryTest { private ClientConversionQuery clientConversionQuery; private static final String DATA_SOURCE = "data_source"; - private static final Interval INTERVAL = new Interval(new DateTime(), new DateTime().plus(1)); - private static final DataSegment DATA_SEGMENT = new DataSegment(DATA_SOURCE, INTERVAL, new DateTime().toString(), null, - null, null, null, 0, 0); + public static final DateTime START = DateTimes.nowUtc(); + private static final Interval INTERVAL = new Interval(START, START.plus(1)); + private static final DataSegment DATA_SEGMENT = + new DataSegment(DATA_SOURCE, INTERVAL, START.toString(), null, null, null, null, 0, 0); @Test public void testGetType() diff --git a/server/src/test/java/io/druid/client/indexing/ClientKillQueryTest.java b/server/src/test/java/io/druid/client/indexing/ClientKillQueryTest.java index a1dc2db0aaa..6f2b4e8aea3 100644 --- a/server/src/test/java/io/druid/client/indexing/ClientKillQueryTest.java +++ b/server/src/test/java/io/druid/client/indexing/ClientKillQueryTest.java @@ -19,6 +19,7 @@ package io.druid.client.indexing; +import io.druid.java.util.common.DateTimes; import org.joda.time.DateTime; import org.joda.time.Interval; import org.junit.After; @@ -29,7 +30,8 @@ import org.junit.Test; public class ClientKillQueryTest { private static final String DATA_SOURCE = "data_source"; - private static final Interval INTERVAL = new Interval(new DateTime(), new DateTime().plus(1)); + public static final DateTime START = DateTimes.nowUtc(); + private static final Interval INTERVAL = new Interval(START, START.plus(1)); ClientKillQuery clientKillQuery; @Before diff --git a/server/src/test/java/io/druid/client/indexing/ClientMergeQueryTest.java b/server/src/test/java/io/druid/client/indexing/ClientMergeQueryTest.java index d7791e76553..0019bd3c136 100644 --- a/server/src/test/java/io/druid/client/indexing/ClientMergeQueryTest.java +++ b/server/src/test/java/io/druid/client/indexing/ClientMergeQueryTest.java @@ -20,6 +20,7 @@ package io.druid.client.indexing; import com.google.common.collect.Lists; +import io.druid.java.util.common.DateTimes; import io.druid.query.aggregation.AggregatorFactory; import io.druid.timeline.DataSegment; import org.joda.time.DateTime; @@ -32,8 +33,9 @@ import java.util.List; public class ClientMergeQueryTest { private static final String DATA_SOURCE = "data_source"; - private static final Interval INTERVAL = new Interval(new DateTime(), new DateTime().plus(1)); - private static final DataSegment DATA_SEGMENT = new DataSegment(DATA_SOURCE, INTERVAL, new DateTime().toString(), null, + public static final DateTime START = DateTimes.nowUtc(); + private static final Interval INTERVAL = new Interval(START, START.plus(1)); + private static final DataSegment DATA_SEGMENT = new DataSegment(DATA_SOURCE, INTERVAL, START.toString(), null, null, null, null, 0, 0); private static final List SEGMENT_LIST = Lists.newArrayList(DATA_SEGMENT); private static final List AGGREGATOR_LIST = Lists.newArrayList(); diff --git a/server/src/test/java/io/druid/client/selector/ServerSelectorTest.java b/server/src/test/java/io/druid/client/selector/ServerSelectorTest.java index bdeeb461d36..ecf1d9eaa70 100644 --- 
a/server/src/test/java/io/druid/client/selector/ServerSelectorTest.java +++ b/server/src/test/java/io/druid/client/selector/ServerSelectorTest.java @@ -23,11 +23,11 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import io.druid.client.DirectDruidClient; import io.druid.client.DruidServer; +import io.druid.java.util.common.Intervals; import io.druid.server.coordination.ServerType; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; import org.easymock.EasyMock; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Before; import org.junit.Test; @@ -51,7 +51,7 @@ public class ServerSelectorTest final ServerSelector selector = new ServerSelector( DataSegment.builder() .dataSource("test_broker_server_view") - .interval(new Interval("2012/2013")) + .interval(Intervals.of("2012/2013")) .loadSpec( ImmutableMap.of( "type", @@ -78,8 +78,7 @@ public class ServerSelectorTest DataSegment.builder() .dataSource( "test_broker_server_view") - .interval(new Interval( - "2012/2013")) + .interval(Intervals.of("2012/2013")) .loadSpec( ImmutableMap.of( "type", diff --git a/server/src/test/java/io/druid/client/selector/TierSelectorStrategyTest.java b/server/src/test/java/io/druid/client/selector/TierSelectorStrategyTest.java index 0fb821ca0e1..41f1f6df214 100644 --- a/server/src/test/java/io/druid/client/selector/TierSelectorStrategyTest.java +++ b/server/src/test/java/io/druid/client/selector/TierSelectorStrategyTest.java @@ -23,13 +23,13 @@ import com.google.common.collect.Lists; import com.google.common.collect.Maps; import io.druid.client.DirectDruidClient; import io.druid.client.DruidServer; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.server.coordination.DruidServerMetadata; import io.druid.server.coordination.ServerType; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; import org.easymock.EasyMock; -import org.joda.time.DateTime; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; @@ -118,8 +118,8 @@ public class TierSelectorStrategyTest final ServerSelector serverSelector = new ServerSelector( new DataSegment( "test", - new Interval("2013-01-01/2013-01-02"), - new DateTime("2013-01-01").toString(), + Intervals.of("2013-01-01/2013-01-02"), + DateTimes.of("2013-01-01").toString(), Maps.newHashMap(), Lists.newArrayList(), Lists.newArrayList(), diff --git a/server/src/test/java/io/druid/metadata/IndexerSQLMetadataStorageCoordinatorTest.java b/server/src/test/java/io/druid/metadata/IndexerSQLMetadataStorageCoordinatorTest.java index f493d07298b..19d8a665b55 100644 --- a/server/src/test/java/io/druid/metadata/IndexerSQLMetadataStorageCoordinatorTest.java +++ b/server/src/test/java/io/druid/metadata/IndexerSQLMetadataStorageCoordinatorTest.java @@ -27,6 +27,7 @@ import io.druid.indexing.overlord.DataSourceMetadata; import io.druid.indexing.overlord.ObjectMetadata; import io.druid.indexing.overlord.SegmentPublishResult; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.StringUtils; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.LinearShardSpec; @@ -55,7 +56,7 @@ public class IndexerSQLMetadataStorageCoordinatorTest private final ObjectMapper mapper = new DefaultObjectMapper(); private final DataSegment defaultSegment = new DataSegment( "fooDataSource", - 
Interval.parse("2015-01-01T00Z/2015-01-02T00Z"), + Intervals.of("2015-01-01T00Z/2015-01-02T00Z"), "version", ImmutableMap.of(), ImmutableList.of("dim1"), @@ -67,7 +68,7 @@ public class IndexerSQLMetadataStorageCoordinatorTest private final DataSegment defaultSegment2 = new DataSegment( "fooDataSource", - Interval.parse("2015-01-01T00Z/2015-01-02T00Z"), + Intervals.of("2015-01-01T00Z/2015-01-02T00Z"), "version", ImmutableMap.of(), ImmutableList.of("dim1"), @@ -79,7 +80,7 @@ public class IndexerSQLMetadataStorageCoordinatorTest private final DataSegment defaultSegment3 = new DataSegment( "fooDataSource", - Interval.parse("2015-01-03T00Z/2015-01-04T00Z"), + Intervals.of("2015-01-03T00Z/2015-01-04T00Z"), "version", ImmutableMap.of(), ImmutableList.of("dim1"), @@ -92,7 +93,7 @@ public class IndexerSQLMetadataStorageCoordinatorTest // Overshadows defaultSegment, defaultSegment2 private final DataSegment defaultSegment4 = new DataSegment( "fooDataSource", - Interval.parse("2015-01-01T00Z/2015-01-02T00Z"), + Intervals.of("2015-01-01T00Z/2015-01-02T00Z"), "zversion", ImmutableMap.of(), ImmutableList.of("dim1"), @@ -104,7 +105,7 @@ public class IndexerSQLMetadataStorageCoordinatorTest private final DataSegment numberedSegment0of0 = new DataSegment( "fooDataSource", - Interval.parse("2015-01-01T00Z/2015-01-02T00Z"), + Intervals.of("2015-01-01T00Z/2015-01-02T00Z"), "zversion", ImmutableMap.of(), ImmutableList.of("dim1"), @@ -116,7 +117,7 @@ public class IndexerSQLMetadataStorageCoordinatorTest private final DataSegment numberedSegment1of0 = new DataSegment( "fooDataSource", - Interval.parse("2015-01-01T00Z/2015-01-02T00Z"), + Intervals.of("2015-01-01T00Z/2015-01-02T00Z"), "zversion", ImmutableMap.of(), ImmutableList.of("dim1"), @@ -128,7 +129,7 @@ public class IndexerSQLMetadataStorageCoordinatorTest private final DataSegment numberedSegment2of0 = new DataSegment( "fooDataSource", - Interval.parse("2015-01-01T00Z/2015-01-02T00Z"), + Intervals.of("2015-01-01T00Z/2015-01-02T00Z"), "zversion", ImmutableMap.of(), ImmutableList.of("dim1"), @@ -140,7 +141,7 @@ public class IndexerSQLMetadataStorageCoordinatorTest private final DataSegment numberedSegment2of1 = new DataSegment( "fooDataSource", - Interval.parse("2015-01-01T00Z/2015-01-02T00Z"), + Intervals.of("2015-01-01T00Z/2015-01-02T00Z"), "zversion", ImmutableMap.of(), ImmutableList.of("dim1"), @@ -152,7 +153,7 @@ public class IndexerSQLMetadataStorageCoordinatorTest private final DataSegment numberedSegment3of1 = new DataSegment( "fooDataSource", - Interval.parse("2015-01-01T00Z/2015-01-02T00Z"), + Intervals.of("2015-01-01T00Z/2015-01-02T00Z"), "zversion", ImmutableMap.of(), ImmutableList.of("dim1"), @@ -523,8 +524,8 @@ public class IndexerSQLMetadataStorageCoordinatorTest coordinator.getUsedSegmentsForIntervals( defaultSegment.getDataSource(), ImmutableList.of( - Interval.parse("2015-01-03T00Z/2015-01-03T05Z"), - Interval.parse("2015-01-03T09Z/2015-01-04T00Z") + Intervals.of("2015-01-03T00Z/2015-01-03T05Z"), + Intervals.of("2015-01-03T09Z/2015-01-04T00Z") ) ) ); @@ -554,7 +555,7 @@ public class IndexerSQLMetadataStorageCoordinatorTest Set actualSegments = ImmutableSet.copyOf( coordinator.getUsedSegmentsForInterval( defaultSegment.getDataSource(), - Interval.parse("2014-12-31T23:59:59.999Z/2015-01-01T00:00:00.001Z") // end is exclusive + Intervals.of("2014-12-31T23:59:59.999Z/2015-01-01T00:00:00.001Z") // end is exclusive ) ); Assert.assertEquals( @@ -573,7 +574,7 @@ public class IndexerSQLMetadataStorageCoordinatorTest ImmutableSet.copyOf( 
coordinator.getUsedSegmentsForInterval( defaultSegment.getDataSource(), - Interval.parse("2015-1-1T23:59:59.999Z/2015-02-01T00Z") + Intervals.of("2015-1-1T23:59:59.999Z/2015-02-01T00Z") ) ) ); @@ -701,7 +702,7 @@ public class IndexerSQLMetadataStorageCoordinatorTest ImmutableSet.copyOf( coordinator.getUnusedSegmentsForInterval( defaultSegment.getDataSource(), - Interval.parse("2000/2999") + Intervals.of("2000/2999") ) ) ); diff --git a/server/src/test/java/io/druid/metadata/MetadataSegmentManagerTest.java b/server/src/test/java/io/druid/metadata/MetadataSegmentManagerTest.java index 9f6074f0b85..3faad305e53 100644 --- a/server/src/test/java/io/druid/metadata/MetadataSegmentManagerTest.java +++ b/server/src/test/java/io/druid/metadata/MetadataSegmentManagerTest.java @@ -26,12 +26,12 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.metamx.emitter.EmittingLogger; -import io.druid.java.util.common.StringUtils; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Intervals; +import io.druid.java.util.common.StringUtils; import io.druid.server.metrics.NoopServiceEmitter; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Before; import org.junit.Rule; @@ -49,7 +49,7 @@ public class MetadataSegmentManagerTest private final DataSegment segment1 = new DataSegment( "wikipedia", - new Interval("2012-03-15T00:00:00.000/2012-03-16T00:00:00.000"), + Intervals.of("2012-03-15T00:00:00.000/2012-03-16T00:00:00.000"), "2012-03-16T00:36:30.848Z", ImmutableMap.of( "type", "s3_zip", @@ -65,7 +65,7 @@ public class MetadataSegmentManagerTest private final DataSegment segment2 = new DataSegment( "wikipedia", - new Interval("2012-01-05T00:00:00.000/2012-01-06T00:00:00.000"), + Intervals.of("2012-01-05T00:00:00.000/2012-01-06T00:00:00.000"), "2012-01-06T22:19:12.565Z", ImmutableMap.of( "type", "s3_zip", @@ -154,12 +154,12 @@ public class MetadataSegmentManagerTest Assert.assertEquals( ImmutableList.of(segment2.getInterval()), - manager.getUnusedSegmentIntervals("wikipedia", new Interval("1970/3000"), 1) + manager.getUnusedSegmentIntervals("wikipedia", Intervals.of("1970/3000"), 1) ); Assert.assertEquals( ImmutableList.of(segment2.getInterval(), segment1.getInterval()), - manager.getUnusedSegmentIntervals("wikipedia", new Interval("1970/3000"), 5) + manager.getUnusedSegmentIntervals("wikipedia", Intervals.of("1970/3000"), 5) ); } } diff --git a/server/src/test/java/io/druid/metadata/SQLMetadataRuleManagerTest.java b/server/src/test/java/io/druid/metadata/SQLMetadataRuleManagerTest.java index 954388a5700..5df9c6b535e 100644 --- a/server/src/test/java/io/druid/metadata/SQLMetadataRuleManagerTest.java +++ b/server/src/test/java/io/druid/metadata/SQLMetadataRuleManagerTest.java @@ -29,13 +29,13 @@ import io.druid.audit.AuditInfo; import io.druid.audit.AuditManager; import io.druid.client.DruidServer; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.StringUtils; import io.druid.server.audit.SQLAuditManager; import io.druid.server.audit.SQLAuditManagerConfig; import io.druid.server.coordinator.rules.IntervalLoadRule; import io.druid.server.coordinator.rules.Rule; import io.druid.server.metrics.NoopServiceEmitter; -import org.joda.time.Interval; import org.junit.After; import org.junit.Assert; import 
org.junit.Before; @@ -87,7 +87,7 @@ public class SQLMetadataRuleManagerTest { List rules = Arrays.asList( new IntervalLoadRule( - new Interval("2015-01-01/2015-02-01"), ImmutableMap.of( + Intervals.of("2015-01-01/2015-02-01"), ImmutableMap.of( DruidServer.DEFAULT_TIER, DruidServer.DEFAULT_NUM_REPLICANTS ) @@ -111,7 +111,7 @@ public class SQLMetadataRuleManagerTest { List rules = Arrays.asList( new IntervalLoadRule( - new Interval("2015-01-01/2015-02-01"), ImmutableMap.of( + Intervals.of("2015-01-01/2015-02-01"), ImmutableMap.of( DruidServer.DEFAULT_TIER, DruidServer.DEFAULT_NUM_REPLICANTS ) @@ -149,7 +149,7 @@ public class SQLMetadataRuleManagerTest { List rules = Arrays.asList( new IntervalLoadRule( - new Interval("2015-01-01/2015-02-01"), ImmutableMap.of( + Intervals.of("2015-01-01/2015-02-01"), ImmutableMap.of( DruidServer.DEFAULT_TIER, DruidServer.DEFAULT_NUM_REPLICANTS ) diff --git a/server/src/test/java/io/druid/metadata/SQLMetadataStorageActionHandlerTest.java b/server/src/test/java/io/druid/metadata/SQLMetadataStorageActionHandlerTest.java index 63f6e668ce9..2942c4c2c78 100644 --- a/server/src/test/java/io/druid/metadata/SQLMetadataStorageActionHandlerTest.java +++ b/server/src/test/java/io/druid/metadata/SQLMetadataStorageActionHandlerTest.java @@ -25,11 +25,9 @@ import com.google.common.base.Optional; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; - import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.Pair; - -import org.joda.time.DateTime; import org.junit.Assert; import org.junit.Before; import org.junit.Rule; @@ -119,7 +117,7 @@ public class SQLMetadataStorageActionHandlerTest final String entryId = "1234"; - handler.insert(entryId, new DateTime("2014-01-02T00:00:00.123"), "testDataSource", entry, true, null); + handler.insert(entryId, DateTimes.of("2014-01-02T00:00:00.123"), "testDataSource", entry, true, null); Assert.assertEquals( Optional.of(entry), @@ -148,7 +146,7 @@ public class SQLMetadataStorageActionHandlerTest Assert.assertEquals( ImmutableList.of(), - handler.getInactiveStatusesSince(new DateTime("2014-01-01")) + handler.getInactiveStatusesSince(DateTimes.of("2014-01-01")) ); Assert.assertTrue(handler.setStatus(entryId, false, status1)); @@ -173,12 +171,12 @@ public class SQLMetadataStorageActionHandlerTest Assert.assertEquals( ImmutableList.of(), - handler.getInactiveStatusesSince(new DateTime("2014-01-03")) + handler.getInactiveStatusesSince(DateTimes.of("2014-01-03")) ); Assert.assertEquals( ImmutableList.of(status1), - handler.getInactiveStatusesSince(new DateTime("2014-01-01")) + handler.getInactiveStatusesSince(DateTimes.of("2014-01-01")) ); } @@ -189,10 +187,10 @@ public class SQLMetadataStorageActionHandlerTest Map entry = ImmutableMap.of("a", 1); Map status = ImmutableMap.of("count", 42); - handler.insert(entryId, new DateTime("2014-01-01"), "test", entry, true, status); + handler.insert(entryId, DateTimes.of("2014-01-01"), "test", entry, true, status); thrown.expect(EntryExistsException.class); - handler.insert(entryId, new DateTime("2014-01-01"), "test", entry, true, status); + handler.insert(entryId, DateTimes.of("2014-01-01"), "test", entry, true, status); } @Test @@ -202,7 +200,7 @@ public class SQLMetadataStorageActionHandlerTest Map entry = ImmutableMap.of("a", 1); Map status = ImmutableMap.of("count", 42); - handler.insert(entryId, new DateTime("2014-01-01"), "test", entry, true, 
status); + handler.insert(entryId, DateTimes.of("2014-01-01"), "test", entry, true, status); Assert.assertEquals( ImmutableList.of(), @@ -234,7 +232,7 @@ public class SQLMetadataStorageActionHandlerTest Map entry = ImmutableMap.of("a", 1); Map status = ImmutableMap.of("count", 42); - handler.insert(entryId, new DateTime("2014-01-01"), "test", entry, true, status); + handler.insert(entryId, DateTimes.of("2014-01-01"), "test", entry, true, status); Assert.assertEquals( ImmutableMap.>of(), diff --git a/server/src/test/java/io/druid/query/LocatedSegmentDescriptorSerdeTest.java b/server/src/test/java/io/druid/query/LocatedSegmentDescriptorSerdeTest.java index 970e4e9f3ea..9fd01143014 100644 --- a/server/src/test/java/io/druid/query/LocatedSegmentDescriptorSerdeTest.java +++ b/server/src/test/java/io/druid/query/LocatedSegmentDescriptorSerdeTest.java @@ -21,9 +21,9 @@ package io.druid.query; import com.fasterxml.jackson.databind.ObjectMapper; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Intervals; import io.druid.server.coordination.DruidServerMetadata; import io.druid.server.coordination.ServerType; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; @@ -39,7 +39,7 @@ public class LocatedSegmentDescriptorSerdeTest public void testDimensionsSpecSerde() throws Exception { LocatedSegmentDescriptor expected = new LocatedSegmentDescriptor( - new SegmentDescriptor(new Interval(100, 200), "version", 100), + new SegmentDescriptor(Intervals.utc(100, 200), "version", 100), 65535, Arrays.asList( new DruidServerMetadata("server1", "host1", null, 30000L, ServerType.HISTORICAL, "tier1", 0), diff --git a/server/src/test/java/io/druid/realtime/firehose/CombiningFirehoseFactoryTest.java b/server/src/test/java/io/druid/realtime/firehose/CombiningFirehoseFactoryTest.java index 3185f9b3fe9..c44f7b80efe 100644 --- a/server/src/test/java/io/druid/realtime/firehose/CombiningFirehoseFactoryTest.java +++ b/server/src/test/java/io/druid/realtime/firehose/CombiningFirehoseFactoryTest.java @@ -20,12 +20,12 @@ package io.druid.realtime.firehose; import com.google.common.collect.Lists; - import io.druid.data.input.Firehose; import io.druid.data.input.FirehoseFactory; import io.druid.data.input.InputRow; import io.druid.data.input.Row; import io.druid.data.input.impl.InputRowParser; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.parsers.ParseException; import io.druid.segment.realtime.firehose.CombiningFirehoseFactory; import io.druid.utils.Runnables; @@ -82,7 +82,7 @@ public class CombiningFirehoseFactoryTest @Override public DateTime getTimestamp() { - return new DateTime(timestamp); + return DateTimes.utc(timestamp); } @Override diff --git a/server/src/test/java/io/druid/segment/indexing/DataSchemaTest.java b/server/src/test/java/io/druid/segment/indexing/DataSchemaTest.java index 71f7a462e6d..a305dda6b66 100644 --- a/server/src/test/java/io/druid/segment/indexing/DataSchemaTest.java +++ b/server/src/test/java/io/druid/segment/indexing/DataSchemaTest.java @@ -28,13 +28,13 @@ import io.druid.data.input.impl.JSONParseSpec; import io.druid.data.input.impl.StringInputRowParser; import io.druid.data.input.impl.TimestampSpec; import io.druid.java.util.common.IAE; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.granularity.DurationGranularity; import io.druid.java.util.common.granularity.Granularities; import io.druid.query.aggregation.AggregatorFactory; import 
io.druid.query.aggregation.DoubleSumAggregatorFactory; import io.druid.segment.TestHelper; import io.druid.segment.indexing.granularity.ArbitraryGranularitySpec; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; @@ -67,7 +67,7 @@ public class DataSchemaTest new DoubleSumAggregatorFactory("metric1", "col1"), new DoubleSumAggregatorFactory("metric2", "col2"), }, - new ArbitraryGranularitySpec(Granularities.DAY, ImmutableList.of(Interval.parse("2014/2015"))), + new ArbitraryGranularitySpec(Granularities.DAY, ImmutableList.of(Intervals.of("2014/2015"))), jsonMapper ); @@ -99,7 +99,7 @@ public class DataSchemaTest new DoubleSumAggregatorFactory("metric1", "col1"), new DoubleSumAggregatorFactory("metric2", "col2"), }, - new ArbitraryGranularitySpec(Granularities.DAY, ImmutableList.of(Interval.parse("2014/2015"))), + new ArbitraryGranularitySpec(Granularities.DAY, ImmutableList.of(Intervals.of("2014/2015"))), jsonMapper ); @@ -131,7 +131,7 @@ public class DataSchemaTest new DoubleSumAggregatorFactory("metric1", "col1"), new DoubleSumAggregatorFactory("metric2", "col2"), }, - new ArbitraryGranularitySpec(Granularities.DAY, ImmutableList.of(Interval.parse("2014/2015"))), + new ArbitraryGranularitySpec(Granularities.DAY, ImmutableList.of(Intervals.of("2014/2015"))), jsonMapper ); schema.getParser(); @@ -160,7 +160,7 @@ public class DataSchemaTest new DoubleSumAggregatorFactory("metric2", "col2"), new DoubleSumAggregatorFactory("metric1", "col3"), }, - new ArbitraryGranularitySpec(Granularities.DAY, ImmutableList.of(Interval.parse("2014/2015"))), + new ArbitraryGranularitySpec(Granularities.DAY, ImmutableList.of(Intervals.of("2014/2015"))), jsonMapper ); schema.getParser(); @@ -242,7 +242,7 @@ public class DataSchemaTest ); Assert.assertEquals( actual.getGranularitySpec(), - new ArbitraryGranularitySpec(new DurationGranularity(86400000, null), ImmutableList.of(Interval.parse("2014/2015"))) + new ArbitraryGranularitySpec(new DurationGranularity(86400000, null), ImmutableList.of(Intervals.of("2014/2015"))) ); } } diff --git a/server/src/test/java/io/druid/segment/indexing/granularity/ArbitraryGranularityTest.java b/server/src/test/java/io/druid/segment/indexing/granularity/ArbitraryGranularityTest.java index 957d04a8e0d..c77241487bf 100644 --- a/server/src/test/java/io/druid/segment/indexing/granularity/ArbitraryGranularityTest.java +++ b/server/src/test/java/io/druid/segment/indexing/granularity/ArbitraryGranularityTest.java @@ -24,8 +24,9 @@ import com.google.common.base.Optional; import com.google.common.base.Throwables; import com.google.common.collect.Lists; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.granularity.Granularities; -import org.joda.time.DateTime; import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; @@ -42,11 +43,11 @@ public class ArbitraryGranularityTest final GranularitySpec spec = new ArbitraryGranularitySpec( null, Lists.newArrayList( - new Interval("2012-01-08T00Z/2012-01-11T00Z"), - new Interval("2012-02-01T00Z/2012-03-01T00Z"), - new Interval("2012-01-07T00Z/2012-01-08T00Z"), - new Interval("2012-01-03T00Z/2012-01-04T00Z"), - new Interval("2012-01-01T00Z/2012-01-03T00Z") + Intervals.of("2012-01-08T00Z/2012-01-11T00Z"), + Intervals.of("2012-02-01T00Z/2012-03-01T00Z"), + Intervals.of("2012-01-07T00Z/2012-01-08T00Z"), + Intervals.of("2012-01-03T00Z/2012-01-04T00Z"), + 
Intervals.of("2012-01-01T00Z/2012-01-03T00Z") )); Assert.assertNotNull(spec.getQueryGranularity()); } @@ -57,66 +58,66 @@ public class ArbitraryGranularityTest final GranularitySpec spec = new ArbitraryGranularitySpec( Granularities.NONE, Lists.newArrayList( - new Interval("2012-01-08T00Z/2012-01-11T00Z"), - new Interval("2012-02-01T00Z/2012-03-01T00Z"), - new Interval("2012-01-07T00Z/2012-01-08T00Z"), - new Interval("2012-01-03T00Z/2012-01-04T00Z"), - new Interval("2012-01-01T00Z/2012-01-03T00Z") + Intervals.of("2012-01-08T00Z/2012-01-11T00Z"), + Intervals.of("2012-02-01T00Z/2012-03-01T00Z"), + Intervals.of("2012-01-07T00Z/2012-01-08T00Z"), + Intervals.of("2012-01-03T00Z/2012-01-04T00Z"), + Intervals.of("2012-01-01T00Z/2012-01-03T00Z") )); Assert.assertTrue(spec.isRollup()); Assert.assertEquals( Lists.newArrayList( - new Interval("2012-01-01T00Z/2012-01-03T00Z"), - new Interval("2012-01-03T00Z/2012-01-04T00Z"), - new Interval("2012-01-07T00Z/2012-01-08T00Z"), - new Interval("2012-01-08T00Z/2012-01-11T00Z"), - new Interval("2012-02-01T00Z/2012-03-01T00Z") + Intervals.of("2012-01-01T00Z/2012-01-03T00Z"), + Intervals.of("2012-01-03T00Z/2012-01-04T00Z"), + Intervals.of("2012-01-07T00Z/2012-01-08T00Z"), + Intervals.of("2012-01-08T00Z/2012-01-11T00Z"), + Intervals.of("2012-02-01T00Z/2012-03-01T00Z") ), Lists.newArrayList(spec.bucketIntervals().get()) ); Assert.assertEquals( "2012-01-03T00Z", - Optional.of(new Interval("2012-01-03T00Z/2012-01-04T00Z")), - spec.bucketInterval(new DateTime("2012-01-03T00Z")) + Optional.of(Intervals.of("2012-01-03T00Z/2012-01-04T00Z")), + spec.bucketInterval(DateTimes.of("2012-01-03T00Z")) ); Assert.assertEquals( "2012-01-03T01Z", - Optional.of(new Interval("2012-01-03T00Z/2012-01-04T00Z")), - spec.bucketInterval(new DateTime("2012-01-03T01Z")) + Optional.of(Intervals.of("2012-01-03T00Z/2012-01-04T00Z")), + spec.bucketInterval(DateTimes.of("2012-01-03T01Z")) ); Assert.assertEquals( "2012-01-04T01Z", Optional.absent(), - spec.bucketInterval(new DateTime("2012-01-04T01Z")) + spec.bucketInterval(DateTimes.of("2012-01-04T01Z")) ); Assert.assertEquals( "2012-01-07T23:59:59.999Z", - Optional.of(new Interval("2012-01-07T00Z/2012-01-08T00Z")), - spec.bucketInterval(new DateTime("2012-01-07T23:59:59.999Z")) + Optional.of(Intervals.of("2012-01-07T00Z/2012-01-08T00Z")), + spec.bucketInterval(DateTimes.of("2012-01-07T23:59:59.999Z")) ); Assert.assertEquals( "2012-01-08T01Z", - Optional.of(new Interval("2012-01-08T00Z/2012-01-11T00Z")), - spec.bucketInterval(new DateTime("2012-01-08T01Z")) + Optional.of(Intervals.of("2012-01-08T00Z/2012-01-11T00Z")), + spec.bucketInterval(DateTimes.of("2012-01-08T01Z")) ); Assert.assertEquals( "2012-01-04T00Z", Optional.absent(), - spec.bucketInterval(new DateTime("2012-01-04T00Z")) + spec.bucketInterval(DateTimes.of("2012-01-04T00Z")) ); Assert.assertEquals( "2012-01-05T00Z", Optional.absent(), - spec.bucketInterval(new DateTime("2012-01-05T00Z")) + spec.bucketInterval(DateTimes.of("2012-01-05T00Z")) ); } @@ -124,9 +125,9 @@ public class ArbitraryGranularityTest public void testOverlapViolation() { List intervals = Lists.newArrayList( - new Interval("2012-01-02T00Z/2012-01-04T00Z"), - new Interval("2012-01-07T00Z/2012-01-08T00Z"), - new Interval("2012-01-01T00Z/2012-01-03T00Z") + Intervals.of("2012-01-02T00Z/2012-01-04T00Z"), + Intervals.of("2012-01-07T00Z/2012-01-08T00Z"), + Intervals.of("2012-01-01T00Z/2012-01-03T00Z") ); boolean thrown = false; @@ -144,11 +145,11 @@ public class ArbitraryGranularityTest public void testRollupSetting() { 
List intervals = Lists.newArrayList( - new Interval("2012-01-08T00Z/2012-01-11T00Z"), - new Interval("2012-02-01T00Z/2012-03-01T00Z"), - new Interval("2012-01-07T00Z/2012-01-08T00Z"), - new Interval("2012-01-03T00Z/2012-01-04T00Z"), - new Interval("2012-01-01T00Z/2012-01-03T00Z") + Intervals.of("2012-01-08T00Z/2012-01-11T00Z"), + Intervals.of("2012-02-01T00Z/2012-03-01T00Z"), + Intervals.of("2012-01-07T00Z/2012-01-08T00Z"), + Intervals.of("2012-01-03T00Z/2012-01-04T00Z"), + Intervals.of("2012-01-01T00Z/2012-01-03T00Z") ); final GranularitySpec spec = new ArbitraryGranularitySpec(Granularities.NONE, false, intervals); @@ -159,8 +160,8 @@ public class ArbitraryGranularityTest public void testOverlapViolationSameStartInstant() { List intervals = Lists.newArrayList( - new Interval("2012-01-03T00Z/2012-01-04T00Z"), - new Interval("2012-01-03T00Z/2012-01-05T00Z") + Intervals.of("2012-01-03T00Z/2012-01-04T00Z"), + Intervals.of("2012-01-03T00Z/2012-01-05T00Z") ); boolean thrown = false; @@ -178,11 +179,11 @@ public class ArbitraryGranularityTest public void testJson() { final GranularitySpec spec = new ArbitraryGranularitySpec(Granularities.NONE, Lists.newArrayList( - new Interval("2012-01-08T00Z/2012-01-11T00Z"), - new Interval("2012-02-01T00Z/2012-03-01T00Z"), - new Interval("2012-01-07T00Z/2012-01-08T00Z"), - new Interval("2012-01-03T00Z/2012-01-04T00Z"), - new Interval("2012-01-01T00Z/2012-01-03T00Z") + Intervals.of("2012-01-08T00Z/2012-01-11T00Z"), + Intervals.of("2012-02-01T00Z/2012-03-01T00Z"), + Intervals.of("2012-01-07T00Z/2012-01-08T00Z"), + Intervals.of("2012-01-03T00Z/2012-01-04T00Z"), + Intervals.of("2012-01-01T00Z/2012-01-03T00Z") )); try { diff --git a/server/src/test/java/io/druid/segment/indexing/granularity/UniformGranularityTest.java b/server/src/test/java/io/druid/segment/indexing/granularity/UniformGranularityTest.java index cec80b3e7b0..2ed837c0c29 100644 --- a/server/src/test/java/io/druid/segment/indexing/granularity/UniformGranularityTest.java +++ b/server/src/test/java/io/druid/segment/indexing/granularity/UniformGranularityTest.java @@ -24,9 +24,10 @@ import com.google.common.base.Optional; import com.google.common.base.Throwables; import com.google.common.collect.Lists; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.granularity.PeriodGranularity; -import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import org.joda.time.Interval; import org.joda.time.Period; @@ -49,10 +50,10 @@ public class UniformGranularityTest Granularities.DAY, null, Lists.newArrayList( - new Interval("2012-01-08T00Z/2012-01-11T00Z"), - new Interval("2012-01-07T00Z/2012-01-08T00Z"), - new Interval("2012-01-03T00Z/2012-01-04T00Z"), - new Interval("2012-01-01T00Z/2012-01-03T00Z") + Intervals.of("2012-01-08T00Z/2012-01-11T00Z"), + Intervals.of("2012-01-07T00Z/2012-01-08T00Z"), + Intervals.of("2012-01-03T00Z/2012-01-04T00Z"), + Intervals.of("2012-01-01T00Z/2012-01-03T00Z") ) ); @@ -60,45 +61,45 @@ public class UniformGranularityTest Assert.assertEquals( Lists.newArrayList( - new Interval("2012-01-01T00Z/P1D"), - new Interval("2012-01-02T00Z/P1D"), - new Interval("2012-01-03T00Z/P1D"), - new Interval("2012-01-07T00Z/P1D"), - new Interval("2012-01-08T00Z/P1D"), - new Interval("2012-01-09T00Z/P1D"), - new Interval("2012-01-10T00Z/P1D") + Intervals.of("2012-01-01T00Z/P1D"), + Intervals.of("2012-01-02T00Z/P1D"), + 
Intervals.of("2012-01-03T00Z/P1D"), + Intervals.of("2012-01-07T00Z/P1D"), + Intervals.of("2012-01-08T00Z/P1D"), + Intervals.of("2012-01-09T00Z/P1D"), + Intervals.of("2012-01-10T00Z/P1D") ), Lists.newArrayList(spec.bucketIntervals().get()) ); Assert.assertEquals( "2012-01-03T00Z", - Optional.of(new Interval("2012-01-03T00Z/2012-01-04T00Z")), - spec.bucketInterval(new DateTime("2012-01-03T00Z")) + Optional.of(Intervals.of("2012-01-03T00Z/2012-01-04T00Z")), + spec.bucketInterval(DateTimes.of("2012-01-03T00Z")) ); Assert.assertEquals( "2012-01-03T01Z", - Optional.of(new Interval("2012-01-03T00Z/2012-01-04T00Z")), - spec.bucketInterval(new DateTime("2012-01-03T01Z")) + Optional.of(Intervals.of("2012-01-03T00Z/2012-01-04T00Z")), + spec.bucketInterval(DateTimes.of("2012-01-03T01Z")) ); Assert.assertEquals( "2012-01-04T01Z", Optional.absent(), - spec.bucketInterval(new DateTime("2012-01-04T01Z")) + spec.bucketInterval(DateTimes.of("2012-01-04T01Z")) ); Assert.assertEquals( "2012-01-07T23:59:59.999Z", - Optional.of(new Interval("2012-01-07T00Z/2012-01-08T00Z")), - spec.bucketInterval(new DateTime("2012-01-07T23:59:59.999Z")) + Optional.of(Intervals.of("2012-01-07T00Z/2012-01-08T00Z")), + spec.bucketInterval(DateTimes.of("2012-01-07T23:59:59.999Z")) ); Assert.assertEquals( "2012-01-08T01Z", - Optional.of(new Interval("2012-01-08T00Z/2012-01-09T00Z")), - spec.bucketInterval(new DateTime("2012-01-08T01Z")) + Optional.of(Intervals.of("2012-01-08T00Z/2012-01-09T00Z")), + spec.bucketInterval(DateTimes.of("2012-01-08T01Z")) ); } @@ -106,10 +107,10 @@ public class UniformGranularityTest public void testRollupSetting() { List intervals = Lists.newArrayList( - new Interval("2012-01-08T00Z/2012-01-11T00Z"), - new Interval("2012-01-07T00Z/2012-01-08T00Z"), - new Interval("2012-01-03T00Z/2012-01-04T00Z"), - new Interval("2012-01-01T00Z/2012-01-03T00Z") + Intervals.of("2012-01-08T00Z/2012-01-11T00Z"), + Intervals.of("2012-01-07T00Z/2012-01-08T00Z"), + Intervals.of("2012-01-03T00Z/2012-01-04T00Z"), + Intervals.of("2012-01-01T00Z/2012-01-03T00Z") ); final GranularitySpec spec = new UniformGranularitySpec(Granularities.DAY, Granularities.NONE, false, intervals); @@ -123,10 +124,10 @@ public class UniformGranularityTest Granularities.DAY, null, Lists.newArrayList( - new Interval("2012-01-08T00Z/2012-01-11T00Z"), - new Interval("2012-01-07T00Z/2012-01-08T00Z"), - new Interval("2012-01-03T00Z/2012-01-04T00Z"), - new Interval("2012-01-01T00Z/2012-01-03T00Z") + Intervals.of("2012-01-08T00Z/2012-01-11T00Z"), + Intervals.of("2012-01-07T00Z/2012-01-08T00Z"), + Intervals.of("2012-01-03T00Z/2012-01-04T00Z"), + Intervals.of("2012-01-01T00Z/2012-01-03T00Z") ) ); @@ -156,10 +157,10 @@ public class UniformGranularityTest Granularities.DAY, null, Lists.newArrayList( - new Interval("2012-01-08T00Z/2012-01-11T00Z"), - new Interval("2012-01-07T00Z/2012-01-08T00Z"), - new Interval("2012-01-03T00Z/2012-01-04T00Z"), - new Interval("2012-01-01T00Z/2012-01-03T00Z") + Intervals.of("2012-01-08T00Z/2012-01-11T00Z"), + Intervals.of("2012-01-07T00Z/2012-01-08T00Z"), + Intervals.of("2012-01-03T00Z/2012-01-04T00Z"), + Intervals.of("2012-01-01T00Z/2012-01-03T00Z") ) ); @@ -168,10 +169,10 @@ public class UniformGranularityTest Granularities.DAY, null, Lists.newArrayList( - new Interval("2012-01-08T00Z/2012-01-11T00Z"), - new Interval("2012-01-07T00Z/2012-01-08T00Z"), - new Interval("2012-01-03T00Z/2012-01-04T00Z"), - new Interval("2012-01-01T00Z/2012-01-03T00Z") + Intervals.of("2012-01-08T00Z/2012-01-11T00Z"), + 
Intervals.of("2012-01-07T00Z/2012-01-08T00Z"), + Intervals.of("2012-01-03T00Z/2012-01-04T00Z"), + Intervals.of("2012-01-01T00Z/2012-01-03T00Z") ) ) ); @@ -190,10 +191,10 @@ public class UniformGranularityTest Granularities.DAY, null, Lists.newArrayList( - new Interval("2012-01-08T00Z/2012-01-11T00Z"), - new Interval("2012-01-07T00Z/2012-01-08T00Z"), - new Interval("2012-01-03T00Z/2012-01-04T00Z"), - new Interval("2012-01-01T00Z/2012-01-03T00Z") + Intervals.of("2012-01-08T00Z/2012-01-11T00Z"), + Intervals.of("2012-01-07T00Z/2012-01-08T00Z"), + Intervals.of("2012-01-03T00Z/2012-01-04T00Z"), + Intervals.of("2012-01-01T00Z/2012-01-03T00Z") ) ); @@ -202,10 +203,10 @@ public class UniformGranularityTest Granularities.YEAR, null, Lists.newArrayList( - new Interval("2012-01-08T00Z/2012-01-11T00Z"), - new Interval("2012-01-07T00Z/2012-01-08T00Z"), - new Interval("2012-01-03T00Z/2012-01-04T00Z"), - new Interval("2012-01-01T00Z/2012-01-03T00Z") + Intervals.of("2012-01-08T00Z/2012-01-11T00Z"), + Intervals.of("2012-01-07T00Z/2012-01-08T00Z"), + Intervals.of("2012-01-03T00Z/2012-01-04T00Z"), + Intervals.of("2012-01-01T00Z/2012-01-03T00Z") ) ) ); @@ -214,10 +215,10 @@ public class UniformGranularityTest Granularities.DAY, null, Lists.newArrayList( - new Interval("2012-01-08T00Z/2012-01-12T00Z"), - new Interval("2012-01-07T00Z/2012-01-08T00Z"), - new Interval("2012-01-03T00Z/2012-01-04T00Z"), - new Interval("2012-01-01T00Z/2012-01-03T00Z") + Intervals.of("2012-01-08T00Z/2012-01-12T00Z"), + Intervals.of("2012-01-07T00Z/2012-01-08T00Z"), + Intervals.of("2012-01-03T00Z/2012-01-04T00Z"), + Intervals.of("2012-01-01T00Z/2012-01-03T00Z") ) ) ); @@ -226,10 +227,10 @@ public class UniformGranularityTest Granularities.DAY, Granularities.ALL, Lists.newArrayList( - new Interval("2012-01-08T00Z/2012-01-11T00Z"), - new Interval("2012-01-07T00Z/2012-01-08T00Z"), - new Interval("2012-01-03T00Z/2012-01-04T00Z"), - new Interval("2012-01-01T00Z/2012-01-03T00Z") + Intervals.of("2012-01-08T00Z/2012-01-11T00Z"), + Intervals.of("2012-01-07T00Z/2012-01-08T00Z"), + Intervals.of("2012-01-03T00Z/2012-01-04T00Z"), + Intervals.of("2012-01-01T00Z/2012-01-03T00Z") ) ) ); @@ -242,11 +243,11 @@ public class UniformGranularityTest new PeriodGranularity(new Period("P1D"), null, DateTimeZone.forID("America/Los_Angeles")), null, Lists.newArrayList( - new Interval("2012-01-08T00-08:00/2012-01-11T00-08:00"), - new Interval("2012-01-07T00-08:00/2012-01-08T00-08:00"), - new Interval("2012-01-03T00-08:00/2012-01-04T00-08:00"), - new Interval("2012-01-01T00-08:00/2012-01-03T00-08:00"), - new Interval("2012-09-01T00-07:00/2012-09-03T00-07:00") + Intervals.of("2012-01-08T00-08:00/2012-01-11T00-08:00"), + Intervals.of("2012-01-07T00-08:00/2012-01-08T00-08:00"), + Intervals.of("2012-01-03T00-08:00/2012-01-04T00-08:00"), + Intervals.of("2012-01-01T00-08:00/2012-01-03T00-08:00"), + Intervals.of("2012-09-01T00-07:00/2012-09-03T00-07:00") ) ); diff --git a/server/src/test/java/io/druid/segment/loading/LocalDataSegmentFinderTest.java b/server/src/test/java/io/druid/segment/loading/LocalDataSegmentFinderTest.java index 485e6b95d28..d8eb28c10a7 100644 --- a/server/src/test/java/io/druid/segment/loading/LocalDataSegmentFinderTest.java +++ b/server/src/test/java/io/druid/segment/loading/LocalDataSegmentFinderTest.java @@ -24,10 +24,10 @@ import com.fasterxml.jackson.databind.jsontype.NamedType; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import io.druid.jackson.DefaultObjectMapper; +import 
io.druid.java.util.common.Intervals; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NumberedShardSpec; import org.apache.commons.io.FileUtils; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Before; import org.junit.BeforeClass; @@ -50,7 +50,7 @@ public class LocalDataSegmentFinderTest private static final DataSegment SEGMENT_1 = DataSegment.builder() .dataSource("wikipedia") .interval( - new Interval( + Intervals.of( "2013-08-31T00:00:00.000Z/2013-09-01T00:00:00.000Z" ) ) @@ -69,7 +69,7 @@ public class LocalDataSegmentFinderTest private static final DataSegment SEGMENT_2 = DataSegment.builder(SEGMENT_1) .interval( - new Interval( + Intervals.of( "2013-09-01T00:00:00.000Z/2013-09-02T00:00:00.000Z" ) ) @@ -77,7 +77,7 @@ public class LocalDataSegmentFinderTest private static final DataSegment SEGMENT_3 = DataSegment.builder(SEGMENT_1) .interval( - new Interval( + Intervals.of( "2013-09-02T00:00:00.000Z/2013-09-03T00:00:00.000Z" ) ) @@ -86,7 +86,7 @@ public class LocalDataSegmentFinderTest private static final DataSegment SEGMENT_4_0 = DataSegment.builder(SEGMENT_1) .interval( - new Interval( + Intervals.of( "2013-09-02T00:00:00.000Z/2013-09-03T00:00:00.000Z" ) ) @@ -95,7 +95,7 @@ public class LocalDataSegmentFinderTest private static final DataSegment SEGMENT_4_1 = DataSegment.builder(SEGMENT_1) .interval( - new Interval( + Intervals.of( "2013-09-02T00:00:00.000Z/2013-09-03T00:00:00.000Z" ) ) diff --git a/server/src/test/java/io/druid/segment/loading/LocalDataSegmentKillerTest.java b/server/src/test/java/io/druid/segment/loading/LocalDataSegmentKillerTest.java index 8240adf2e29..b5eaad7fc9c 100644 --- a/server/src/test/java/io/druid/segment/loading/LocalDataSegmentKillerTest.java +++ b/server/src/test/java/io/druid/segment/loading/LocalDataSegmentKillerTest.java @@ -21,9 +21,9 @@ package io.druid.segment.loading; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; +import io.druid.java.util.common.Intervals; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Rule; import org.junit.Test; @@ -103,7 +103,7 @@ public class LocalDataSegmentKillerTest { return new DataSegment( "dataSource", - Interval.parse("2000/3000"), + Intervals.of("2000/3000"), "ver", ImmutableMap.of( "type", "local", diff --git a/server/src/test/java/io/druid/segment/loading/LocalDataSegmentPusherTest.java b/server/src/test/java/io/druid/segment/loading/LocalDataSegmentPusherTest.java index 19fb108ec81..efd5633747a 100644 --- a/server/src/test/java/io/druid/segment/loading/LocalDataSegmentPusherTest.java +++ b/server/src/test/java/io/druid/segment/loading/LocalDataSegmentPusherTest.java @@ -24,10 +24,10 @@ import com.google.common.collect.ImmutableList; import com.google.common.io.Files; import com.google.common.primitives.Ints; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.StringUtils; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Before; import org.junit.Rule; @@ -50,15 +50,15 @@ public class LocalDataSegmentPusherTest LocalDataSegmentPusherConfig config; File dataSegmentFiles; DataSegment dataSegment = new DataSegment( - "ds", - new Interval(0, 1), - "v1", - null, - null, - null, - NoneShardSpec.instance(), - null, - -1 + "ds", + 
Intervals.utc(0, 1), + "v1", + null, + null, + null, + NoneShardSpec.instance(), + null, + -1 ); @Before diff --git a/server/src/test/java/io/druid/segment/loading/SegmentLoaderLocalCacheManagerTest.java b/server/src/test/java/io/druid/segment/loading/SegmentLoaderLocalCacheManagerTest.java index cdb72409fa1..ed10455f337 100644 --- a/server/src/test/java/io/druid/segment/loading/SegmentLoaderLocalCacheManagerTest.java +++ b/server/src/test/java/io/druid/segment/loading/SegmentLoaderLocalCacheManagerTest.java @@ -27,11 +27,11 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.metamx.emitter.EmittingLogger; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Intervals; import io.druid.segment.TestHelper; import io.druid.server.metrics.NoopServiceEmitter; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Before; import org.junit.Rule; @@ -375,7 +375,7 @@ public class SegmentLoaderLocalCacheManagerTest { return DataSegment.builder() .dataSource("test_segment_loader") - .interval(new Interval(intervalStr)) + .interval(Intervals.of(intervalStr)) .loadSpec( ImmutableMap.of( "type", diff --git a/server/src/test/java/io/druid/segment/loading/StorageLocationTest.java b/server/src/test/java/io/druid/segment/loading/StorageLocationTest.java index a3a77f67b8f..241edd8f1f6 100644 --- a/server/src/test/java/io/druid/segment/loading/StorageLocationTest.java +++ b/server/src/test/java/io/druid/segment/loading/StorageLocationTest.java @@ -20,8 +20,8 @@ package io.druid.segment.loading; import com.google.common.collect.ImmutableMap; +import io.druid.java.util.common.Intervals; import io.druid.timeline.DataSegment; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; @@ -77,7 +77,7 @@ public class StorageLocationTest { return new DataSegment( "test", - new Interval(intervalString), + Intervals.of(intervalString), "1", ImmutableMap.of(), Arrays.asList("d"), diff --git a/server/src/test/java/io/druid/segment/realtime/RealtimeManagerTest.java b/server/src/test/java/io/druid/segment/realtime/RealtimeManagerTest.java index 49c11be5b07..90b1e66f4b2 100644 --- a/server/src/test/java/io/druid/segment/realtime/RealtimeManagerTest.java +++ b/server/src/test/java/io/druid/segment/realtime/RealtimeManagerTest.java @@ -38,7 +38,9 @@ import io.druid.data.input.InputRow; import io.druid.data.input.Row; import io.druid.data.input.impl.InputRowParser; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.parsers.ParseException; import io.druid.query.BaseQuery; @@ -102,10 +104,10 @@ public class RealtimeManagerTest private static QueryRunnerFactoryConglomerate conglomerate; private static final List rows = Arrays.asList( - makeRow(new DateTime("9000-01-01").getMillis()), + makeRow(DateTimes.of("9000-01-01").getMillis()), makeRow(new ParseException("parse error")), null, - makeRow(new DateTime().getMillis()) + makeRow(System.currentTimeMillis()) ); private RealtimeManager realtimeManager; @@ -212,10 +214,10 @@ public class RealtimeManagerTest null ); plumber = new TestPlumber(new Sink( - new Interval("0/P5000Y"), + Intervals.of("0/P5000Y"), schema, tuningConfig.getShardSpec(), - new DateTime().toString(), + 
DateTimes.nowUtc().toString(), tuningConfig.getMaxRowsInMemory(), tuningConfig.isReportParseExceptions() )); @@ -232,10 +234,10 @@ public class RealtimeManagerTest EasyMock.createNiceMock(DataSegmentServerAnnouncer.class) ); plumber2 = new TestPlumber(new Sink( - new Interval("0/P5000Y"), + Intervals.of("0/P5000Y"), schema2, tuningConfig.getShardSpec(), - new DateTime().toString(), + DateTimes.nowUtc().toString(), tuningConfig.getMaxRowsInMemory(), tuningConfig.isReportParseExceptions() )); @@ -565,7 +567,7 @@ public class RealtimeManagerTest query, ImmutableList.of( new SegmentDescriptor( - new Interval("2011-04-01T00:00:00.000Z/2011-04-03T00:00:00.000Z"), + Intervals.of("2011-04-01T00:00:00.000Z/2011-04-03T00:00:00.000Z"), "ver", 0 )) @@ -580,7 +582,7 @@ public class RealtimeManagerTest query, ImmutableList.of( new SegmentDescriptor( - new Interval("2011-04-01T00:00:00.000Z/2011-04-03T00:00:00.000Z"), + Intervals.of("2011-04-01T00:00:00.000Z/2011-04-03T00:00:00.000Z"), "ver", 1 )) @@ -650,8 +652,8 @@ public class RealtimeManagerTest Thread.sleep(10); } - final Interval interval_26_28 = new Interval("2011-03-26T00:00:00.000Z/2011-03-28T00:00:00.000Z"); - final Interval interval_28_29 = new Interval("2011-03-28T00:00:00.000Z/2011-03-29T00:00:00.000Z"); + final Interval interval_26_28 = Intervals.of("2011-03-26T00:00:00.000Z/2011-03-28T00:00:00.000Z"); + final Interval interval_28_29 = Intervals.of("2011-03-28T00:00:00.000Z/2011-03-29T00:00:00.000Z"); final SegmentDescriptor descriptor_26_28_0 = new SegmentDescriptor(interval_26_28, "ver0", 0); final SegmentDescriptor descriptor_28_29_0 = new SegmentDescriptor(interval_28_29, "ver1", 0); final SegmentDescriptor descriptor_26_28_1 = new SegmentDescriptor(interval_26_28, "ver0", 1); @@ -799,7 +801,7 @@ public class RealtimeManagerTest @Override public DateTime getTimestamp() { - return new DateTime(timestamp); + return DateTimes.utc(timestamp); } @Override diff --git a/server/src/test/java/io/druid/segment/realtime/appenderator/AppenderatorDriverFailTest.java b/server/src/test/java/io/druid/segment/realtime/appenderator/AppenderatorDriverFailTest.java index af2aedecd44..e1237ef1696 100644 --- a/server/src/test/java/io/druid/segment/realtime/appenderator/AppenderatorDriverFailTest.java +++ b/server/src/test/java/io/druid/segment/realtime/appenderator/AppenderatorDriverFailTest.java @@ -32,6 +32,7 @@ import io.druid.data.input.Committer; import io.druid.data.input.InputRow; import io.druid.data.input.MapBasedInputRow; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.ISE; import io.druid.java.util.common.granularity.Granularities; import io.druid.query.Query; @@ -44,7 +45,6 @@ import io.druid.segment.realtime.appenderator.AppenderatorDriverTest.TestSegment import io.druid.segment.realtime.appenderator.AppenderatorDriverTest.TestSegmentHandoffNotifierFactory; import io.druid.timeline.DataSegment; import org.hamcrest.CoreMatchers; -import org.joda.time.DateTime; import org.joda.time.Interval; import org.junit.After; import org.junit.Assert; @@ -73,17 +73,17 @@ public class AppenderatorDriverFailTest private static final List ROWS = ImmutableList.of( new MapBasedInputRow( - new DateTime("2000"), + DateTimes.of("2000"), ImmutableList.of("dim1"), ImmutableMap.of("dim1", "foo", "met1", "1") ), new MapBasedInputRow( - new DateTime("2000T01"), + DateTimes.of("2000T01"), ImmutableList.of("dim1"), ImmutableMap.of("dim1", "foo", "met1", 2.0) ), new MapBasedInputRow( - new 
DateTime("2000T01"), + DateTimes.of("2000T01"), ImmutableList.of("dim2"), ImmutableMap.of("dim2", "bar", "met1", 2.0) ) diff --git a/server/src/test/java/io/druid/segment/realtime/appenderator/AppenderatorDriverTest.java b/server/src/test/java/io/druid/segment/realtime/appenderator/AppenderatorDriverTest.java index 46c75009483..d6430b6acff 100644 --- a/server/src/test/java/io/druid/segment/realtime/appenderator/AppenderatorDriverTest.java +++ b/server/src/test/java/io/druid/segment/realtime/appenderator/AppenderatorDriverTest.java @@ -34,6 +34,8 @@ import io.druid.data.input.Committer; import io.druid.data.input.InputRow; import io.druid.data.input.MapBasedInputRow; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.granularity.Granularity; @@ -47,7 +49,6 @@ import io.druid.timeline.VersionedIntervalTimeline; import io.druid.timeline.partition.NumberedShardSpec; import io.druid.timeline.partition.PartitionChunk; import org.joda.time.DateTime; -import org.joda.time.Interval; import org.junit.After; import org.junit.Assert; import org.junit.Before; @@ -77,17 +78,17 @@ public class AppenderatorDriverTest private static final List ROWS = Arrays.asList( new MapBasedInputRow( - new DateTime("2000"), + DateTimes.of("2000"), ImmutableList.of("dim1"), ImmutableMap.of("dim1", "foo", "met1", "1") ), new MapBasedInputRow( - new DateTime("2000T01"), + DateTimes.of("2000T01"), ImmutableList.of("dim1"), ImmutableMap.of("dim1", "foo", "met1", 2.0) ), new MapBasedInputRow( - new DateTime("2000T01"), + DateTimes.of("2000T01"), ImmutableList.of("dim2"), ImmutableMap.of("dim2", "bar", "met1", 2.0) ) @@ -145,8 +146,8 @@ public class AppenderatorDriverTest Assert.assertEquals( ImmutableSet.of( - new SegmentIdentifier(DATA_SOURCE, new Interval("2000/PT1H"), VERSION, new NumberedShardSpec(0, 0)), - new SegmentIdentifier(DATA_SOURCE, new Interval("2000T01/PT1H"), VERSION, new NumberedShardSpec(0, 0)) + new SegmentIdentifier(DATA_SOURCE, Intervals.of("2000/PT1H"), VERSION, new NumberedShardSpec(0, 0)), + new SegmentIdentifier(DATA_SOURCE, Intervals.of("2000T01/PT1H"), VERSION, new NumberedShardSpec(0, 0)) ), asIdentifiers(segmentsAndMetadata.getSegments()) ); @@ -164,7 +165,7 @@ public class AppenderatorDriverTest for (int i = 0; i < numSegments * MAX_ROWS_PER_SEGMENT; i++) { committerSupplier.setMetadata(i + 1); InputRow row = new MapBasedInputRow( - new DateTime("2000T01"), + DateTimes.of("2000T01"), ImmutableList.of("dim2"), ImmutableMap.of( "dim2", @@ -236,7 +237,7 @@ public class AppenderatorDriverTest Assert.assertEquals( ImmutableSet.of( - new SegmentIdentifier(DATA_SOURCE, new Interval("2000/PT1H"), VERSION, new NumberedShardSpec(0, 0)) + new SegmentIdentifier(DATA_SOURCE, Intervals.of("2000/PT1H"), VERSION, new NumberedShardSpec(0, 0)) ), asIdentifiers(segmentsAndMetadata.getSegments()) ); @@ -259,7 +260,7 @@ public class AppenderatorDriverTest ImmutableSet.of( // The second and third rows have the same dataSource, interval, and version, but different shardSpec of // different partitionNum - new SegmentIdentifier(DATA_SOURCE, new Interval("2000T01/PT1H"), VERSION, new NumberedShardSpec(i - 1, 0)) + new SegmentIdentifier(DATA_SOURCE, Intervals.of("2000T01/PT1H"), VERSION, new NumberedShardSpec(i - 1, 0)) ), asIdentifiers(segmentsAndMetadata.getSegments()) ); @@ -322,14 +323,14 @@ public 
class AppenderatorDriverTest Assert.assertEquals( ImmutableSet.of( - new SegmentIdentifier(DATA_SOURCE, new Interval("2000/PT1H"), VERSION, new NumberedShardSpec(0, 0)) + new SegmentIdentifier(DATA_SOURCE, Intervals.of("2000/PT1H"), VERSION, new NumberedShardSpec(0, 0)) ), asIdentifiers(handedoffFromSequence0.getSegments()) ); Assert.assertEquals( ImmutableSet.of( - new SegmentIdentifier(DATA_SOURCE, new Interval("2000T01/PT1H"), VERSION, new NumberedShardSpec(0, 0)) + new SegmentIdentifier(DATA_SOURCE, Intervals.of("2000T01/PT1H"), VERSION, new NumberedShardSpec(0, 0)) ), asIdentifiers(handedoffFromSequence1.getSegments()) ); @@ -417,14 +418,15 @@ public class AppenderatorDriverTest ) throws IOException { synchronized (counters) { - final long timestampTruncated = granularity.bucketStart(row.getTimestamp()).getMillis(); + DateTime dateTimeTruncated = granularity.bucketStart(row.getTimestamp()); + final long timestampTruncated = dateTimeTruncated.getMillis(); if (!counters.containsKey(timestampTruncated)) { counters.put(timestampTruncated, new AtomicInteger()); } final int partitionNum = counters.get(timestampTruncated).getAndIncrement(); return new SegmentIdentifier( dataSource, - granularity.bucket(new DateTime(timestampTruncated)), + granularity.bucket(dateTimeTruncated), VERSION, new NumberedShardSpec(partitionNum, 0) ); diff --git a/server/src/test/java/io/druid/segment/realtime/appenderator/AppenderatorTest.java b/server/src/test/java/io/druid/segment/realtime/appenderator/AppenderatorTest.java index f678f64b955..10d314eed88 100644 --- a/server/src/test/java/io/druid/segment/realtime/appenderator/AppenderatorTest.java +++ b/server/src/test/java/io/druid/segment/realtime/appenderator/AppenderatorTest.java @@ -28,6 +28,8 @@ import com.google.common.collect.Lists; import io.druid.data.input.Committer; import io.druid.data.input.InputRow; import io.druid.data.input.MapBasedInputRow; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Sequences; import io.druid.query.Druids; @@ -43,8 +45,6 @@ import io.druid.segment.indexing.RealtimeTuningConfig; import io.druid.segment.realtime.plumber.Committers; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.LinearShardSpec; -import org.joda.time.DateTime; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; @@ -303,7 +303,7 @@ public class AppenderatorTest // Query1: 2000/2001 final TimeseriesQuery query1 = Druids.newTimeseriesQueryBuilder() .dataSource(AppenderatorTester.DATASOURCE) - .intervals(ImmutableList.of(new Interval("2000/2001"))) + .intervals(ImmutableList.of(Intervals.of("2000/2001"))) .aggregators( Arrays.asList( new LongSumAggregatorFactory("count", "count"), @@ -319,7 +319,7 @@ public class AppenderatorTest "query1", ImmutableList.of( new Result<>( - new DateTime("2000"), + DateTimes.of("2000"), new TimeseriesResultValue(ImmutableMap.of("count", 3L, "met", 7L)) ) ), @@ -329,7 +329,7 @@ public class AppenderatorTest // Query2: 2000/2002 final TimeseriesQuery query2 = Druids.newTimeseriesQueryBuilder() .dataSource(AppenderatorTester.DATASOURCE) - .intervals(ImmutableList.of(new Interval("2000/2002"))) + .intervals(ImmutableList.of(Intervals.of("2000/2002"))) .aggregators( Arrays.asList( new LongSumAggregatorFactory("count", "count"), @@ -345,11 +345,11 @@ public class AppenderatorTest "query2", ImmutableList.of( new Result<>( - new DateTime("2000"), + 
DateTimes.of("2000"), new TimeseriesResultValue(ImmutableMap.of("count", 3L, "met", 7L)) ), new Result<>( - new DateTime("2001"), + DateTimes.of("2001"), new TimeseriesResultValue(ImmutableMap.of("count", 4L, "met", 120L)) ) ), @@ -359,7 +359,7 @@ public class AppenderatorTest // Query3: 2000/2001T01 final TimeseriesQuery query3 = Druids.newTimeseriesQueryBuilder() .dataSource(AppenderatorTester.DATASOURCE) - .intervals(ImmutableList.of(new Interval("2000/2001T01"))) + .intervals(ImmutableList.of(Intervals.of("2000/2001T01"))) .aggregators( Arrays.asList( new LongSumAggregatorFactory("count", "count"), @@ -374,11 +374,11 @@ public class AppenderatorTest Assert.assertEquals( ImmutableList.of( new Result<>( - new DateTime("2000"), + DateTimes.of("2000"), new TimeseriesResultValue(ImmutableMap.of("count", 3L, "met", 7L)) ), new Result<>( - new DateTime("2001"), + DateTimes.of("2001"), new TimeseriesResultValue(ImmutableMap.of("count", 1L, "met", 8L)) ) ), @@ -390,8 +390,8 @@ public class AppenderatorTest .dataSource(AppenderatorTester.DATASOURCE) .intervals( ImmutableList.of( - new Interval("2000/2001T01"), - new Interval("2001T03/2001T04") + Intervals.of("2000/2001T01"), + Intervals.of("2001T03/2001T04") ) ) .aggregators( @@ -408,11 +408,11 @@ public class AppenderatorTest Assert.assertEquals( ImmutableList.of( new Result<>( - new DateTime("2000"), + DateTimes.of("2000"), new TimeseriesResultValue(ImmutableMap.of("count", 3L, "met", 7L)) ), new Result<>( - new DateTime("2001"), + DateTimes.of("2001"), new TimeseriesResultValue(ImmutableMap.of("count", 2L, "met", 72L)) ) ), @@ -465,7 +465,7 @@ public class AppenderatorTest "query1", ImmutableList.of( new Result<>( - new DateTime("2001"), + DateTimes.of("2001"), new TimeseriesResultValue(ImmutableMap.of("count", 4L, "met", 120L)) ) ), @@ -486,7 +486,7 @@ public class AppenderatorTest new MultipleSpecificSegmentSpec( ImmutableList.of( new SegmentDescriptor( - new Interval("2001/PT1H"), + Intervals.of("2001/PT1H"), IDENTIFIERS.get(2).getVersion(), IDENTIFIERS.get(2).getShardSpec().getPartitionNum() ) @@ -501,7 +501,7 @@ public class AppenderatorTest "query2", ImmutableList.of( new Result<>( - new DateTime("2001"), + DateTimes.of("2001"), new TimeseriesResultValue(ImmutableMap.of("count", 1L, "met", 8L)) ) ), @@ -522,12 +522,12 @@ public class AppenderatorTest new MultipleSpecificSegmentSpec( ImmutableList.of( new SegmentDescriptor( - new Interval("2001/PT1H"), + Intervals.of("2001/PT1H"), IDENTIFIERS.get(2).getVersion(), IDENTIFIERS.get(2).getShardSpec().getPartitionNum() ), new SegmentDescriptor( - new Interval("2001T03/PT1H"), + Intervals.of("2001T03/PT1H"), IDENTIFIERS.get(2).getVersion(), IDENTIFIERS.get(2).getShardSpec().getPartitionNum() ) @@ -542,7 +542,7 @@ public class AppenderatorTest "query2", ImmutableList.of( new Result<>( - new DateTime("2001"), + DateTimes.of("2001"), new TimeseriesResultValue(ImmutableMap.of("count", 2L, "met", 72L)) ) ), @@ -555,7 +555,7 @@ public class AppenderatorTest { return new SegmentIdentifier( AppenderatorTester.DATASOURCE, - new Interval(interval), + Intervals.of(interval), version, new LinearShardSpec(partitionNum) ); @@ -564,7 +564,7 @@ public class AppenderatorTest static InputRow IR(String ts, String dim, long met) { return new MapBasedInputRow( - new DateTime(ts).getMillis(), + DateTimes.of(ts).getMillis(), ImmutableList.of("dim"), ImmutableMap.of( "dim", diff --git a/server/src/test/java/io/druid/segment/realtime/appenderator/CommittedTest.java 
b/server/src/test/java/io/druid/segment/realtime/appenderator/CommittedTest.java index 29c5f3ba4a3..abb086b295c 100644 --- a/server/src/test/java/io/druid/segment/realtime/appenderator/CommittedTest.java +++ b/server/src/test/java/io/druid/segment/realtime/appenderator/CommittedTest.java @@ -23,8 +23,8 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Maps; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Intervals; import io.druid.timeline.partition.LinearShardSpec; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; @@ -36,21 +36,21 @@ public class CommittedTest private static final SegmentIdentifier IDENTIFIER_OBJECT1 = new SegmentIdentifier( "foo", - new Interval("2000/2001"), + Intervals.of("2000/2001"), "2000", new LinearShardSpec(1) ); private static final SegmentIdentifier IDENTIFIER_OBJECT2 = new SegmentIdentifier( "foo", - new Interval("2001/2002"), + Intervals.of("2001/2002"), "2001", new LinearShardSpec(1) ); private static final SegmentIdentifier IDENTIFIER_OBJECT3 = new SegmentIdentifier( "foo", - new Interval("2001/2002"), + Intervals.of("2001/2002"), "2001", new LinearShardSpec(2) ); diff --git a/server/src/test/java/io/druid/segment/realtime/appenderator/DefaultOfflineAppenderatorFactoryTest.java b/server/src/test/java/io/druid/segment/realtime/appenderator/DefaultOfflineAppenderatorFactoryTest.java index a286515a3f9..f2ebc839d64 100644 --- a/server/src/test/java/io/druid/segment/realtime/appenderator/DefaultOfflineAppenderatorFactoryTest.java +++ b/server/src/test/java/io/druid/segment/realtime/appenderator/DefaultOfflineAppenderatorFactoryTest.java @@ -32,6 +32,7 @@ import io.druid.data.input.impl.MapInputRowParser; import io.druid.data.input.impl.TimestampSpec; import io.druid.guice.GuiceInjectors; import io.druid.initialization.Initialization; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.granularity.Granularities; import io.druid.query.DruidProcessingConfig; import io.druid.query.aggregation.AggregatorFactory; @@ -44,7 +45,6 @@ import io.druid.segment.indexing.granularity.UniformGranularitySpec; import io.druid.segment.realtime.FireDepartmentMetrics; import io.druid.segment.realtime.plumber.Committers; import io.druid.timeline.partition.LinearShardSpec; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Rule; import org.junit.Test; @@ -157,7 +157,7 @@ public class DefaultOfflineAppenderatorFactoryTest Assert.assertEquals(null, appenderator.startJob()); SegmentIdentifier identifier = new SegmentIdentifier( "dataSourceName", - new Interval("2000/2001"), + Intervals.of("2000/2001"), "A", new LinearShardSpec(0) ); diff --git a/server/src/test/java/io/druid/segment/realtime/appenderator/SegmentIdentifierTest.java b/server/src/test/java/io/druid/segment/realtime/appenderator/SegmentIdentifierTest.java index 79571ef6f7e..d3bc406c0e9 100644 --- a/server/src/test/java/io/druid/segment/realtime/appenderator/SegmentIdentifierTest.java +++ b/server/src/test/java/io/druid/segment/realtime/appenderator/SegmentIdentifierTest.java @@ -21,6 +21,7 @@ package io.druid.segment.realtime.appenderator; import com.fasterxml.jackson.databind.ObjectMapper; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Intervals; import io.druid.timeline.partition.NumberedShardSpec; import org.joda.time.Interval; import org.junit.Assert; @@ -29,7 +30,7 @@ import org.junit.Test; public 
class SegmentIdentifierTest { private static final String DATA_SOURCE = "foo"; - private static final Interval INTERVAL = new Interval("2000/PT1H"); + private static final Interval INTERVAL = Intervals.of("2000/PT1H"); private static final String VERSION = "v1"; private static final NumberedShardSpec SHARD_SPEC_0 = new NumberedShardSpec(0, 2); private static final NumberedShardSpec SHARD_SPEC_1 = new NumberedShardSpec(1, 2); diff --git a/server/src/test/java/io/druid/segment/realtime/firehose/EventReceiverFirehoseTest.java b/server/src/test/java/io/druid/segment/realtime/firehose/EventReceiverFirehoseTest.java index 4b2b106d8f2..94c87c953c3 100644 --- a/server/src/test/java/io/druid/segment/realtime/firehose/EventReceiverFirehoseTest.java +++ b/server/src/test/java/io/druid/segment/realtime/firehose/EventReceiverFirehoseTest.java @@ -27,12 +27,12 @@ import io.druid.data.input.impl.JSONParseSpec; import io.druid.data.input.impl.MapInputRowParser; import io.druid.data.input.impl.TimestampSpec; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.ISE; import io.druid.server.metrics.EventReceiverFirehoseMetric; import io.druid.server.metrics.EventReceiverFirehoseRegister; import org.apache.commons.io.IOUtils; import org.easymock.EasyMock; -import org.joda.time.DateTime; import org.junit.Assert; import org.junit.Before; import org.junit.Test; @@ -228,7 +228,7 @@ public class EventReceiverFirehoseTest @Test(timeout = 40_000L) public void testShutdownWithPrevTime() throws Exception { - firehose.shutdown(DateTime.now().minusMinutes(2).toString()); + firehose.shutdown(DateTimes.nowUtc().minusMinutes(2).toString()); while (!firehose.isClosed()) { Thread.sleep(50); } @@ -237,7 +237,7 @@ public class EventReceiverFirehoseTest @Test(timeout = 40_000L) public void testShutdown() throws Exception { - firehose.shutdown(DateTime.now().plusMillis(100).toString()); + firehose.shutdown(DateTimes.nowUtc().plusMillis(100).toString()); while (!firehose.isClosed()) { Thread.sleep(50); } diff --git a/server/src/test/java/io/druid/segment/realtime/firehose/IngestSegmentFirehoseTest.java b/server/src/test/java/io/druid/segment/realtime/firehose/IngestSegmentFirehoseTest.java index bf3a4780567..0d983788f39 100644 --- a/server/src/test/java/io/druid/segment/realtime/firehose/IngestSegmentFirehoseTest.java +++ b/server/src/test/java/io/druid/segment/realtime/firehose/IngestSegmentFirehoseTest.java @@ -31,6 +31,8 @@ import io.druid.data.input.impl.StringDimensionSchema; import io.druid.data.input.impl.StringInputRowParser; import io.druid.data.input.impl.TimestampSpec; import io.druid.hll.HyperLogLogCollector; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.LongSumAggregatorFactory; import io.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory; @@ -45,8 +47,6 @@ import io.druid.segment.TestHelper; import io.druid.segment.incremental.IncrementalIndex; import io.druid.segment.incremental.IncrementalIndexSchema; import io.druid.segment.incremental.IncrementalIndexStorageAdapter; -import org.joda.time.DateTime; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Rule; import org.junit.Test; @@ -127,7 +127,7 @@ public class IngestSegmentFirehoseTest final InputRow row = firehose.nextRow(); Assert.assertNotNull(row); if (count == 0) { - Assert.assertEquals(new DateTime("2014-10-22T00Z"), 
row.getTimestamp()); + Assert.assertEquals(DateTimes.of("2014-10-22T00Z"), row.getTimestamp()); Assert.assertEquals("host1", row.getRaw("host")); Assert.assertEquals("0,1", row.getRaw("spatial")); Assert.assertEquals(10L, row.getRaw("visited_sum")); @@ -148,14 +148,14 @@ public class IngestSegmentFirehoseTest // Do a spatial filter final IngestSegmentFirehose firehose2 = new IngestSegmentFirehose( - ImmutableList.of(new WindowedStorageAdapter(queryable, new Interval("2000/3000"))), + ImmutableList.of(new WindowedStorageAdapter(queryable, Intervals.of("2000/3000"))), ImmutableList.of("host", "spatial"), ImmutableList.of("visited_sum", "unique_hosts"), new SpatialDimFilter("spatial", new RadiusBound(new float[]{1, 0}, 0.1f)) ); final InputRow row = firehose2.nextRow(); Assert.assertFalse(firehose2.hasMore()); - Assert.assertEquals(new DateTime("2014-10-22T00Z"), row.getTimestamp()); + Assert.assertEquals(DateTimes.of("2014-10-22T00Z"), row.getTimestamp()); Assert.assertEquals("host2", row.getRaw("host")); Assert.assertEquals("1,0", row.getRaw("spatial")); Assert.assertEquals(40L, row.getRaw("visited_sum")); diff --git a/server/src/test/java/io/druid/segment/realtime/plumber/CoordinatorBasedSegmentHandoffNotifierTest.java b/server/src/test/java/io/druid/segment/realtime/plumber/CoordinatorBasedSegmentHandoffNotifierTest.java index bc40f3dcb41..b328add2de7 100644 --- a/server/src/test/java/io/druid/segment/realtime/plumber/CoordinatorBasedSegmentHandoffNotifierTest.java +++ b/server/src/test/java/io/druid/segment/realtime/plumber/CoordinatorBasedSegmentHandoffNotifierTest.java @@ -24,6 +24,7 @@ import com.google.common.collect.Sets; import com.google.common.util.concurrent.MoreExecutors; import io.druid.client.ImmutableSegmentLoadInfo; import io.druid.client.coordinator.CoordinatorClient; +import io.druid.java.util.common.Intervals; import io.druid.query.SegmentDescriptor; import io.druid.server.coordination.DruidServerMetadata; import io.druid.server.coordination.ServerType; @@ -53,7 +54,7 @@ public class CoordinatorBasedSegmentHandoffNotifierTest @Test public void testHandoffCallbackNotCalled() throws IOException, InterruptedException { - Interval interval = new Interval( + Interval interval = Intervals.of( "2011-04-01/2011-04-02" ); SegmentDescriptor descriptor = new SegmentDescriptor( @@ -110,7 +111,7 @@ public class CoordinatorBasedSegmentHandoffNotifierTest @Test public void testHandoffCallbackCalled() throws IOException, InterruptedException { - Interval interval = new Interval( + Interval interval = Intervals.of( "2011-04-01/2011-04-02" ); SegmentDescriptor descriptor = new SegmentDescriptor( @@ -168,7 +169,7 @@ public class CoordinatorBasedSegmentHandoffNotifierTest @Test public void testHandoffChecksForVersion() { - Interval interval = new Interval( + Interval interval = Intervals.of( "2011-04-01/2011-04-02" ); Assert.assertFalse( @@ -212,7 +213,7 @@ public class CoordinatorBasedSegmentHandoffNotifierTest @Test public void testHandoffChecksForAssignableServer() { - Interval interval = new Interval( + Interval interval = Intervals.of( "2011-04-01/2011-04-02" ); Assert.assertTrue( @@ -243,7 +244,7 @@ public class CoordinatorBasedSegmentHandoffNotifierTest @Test public void testHandoffChecksForPartitionNumber() { - Interval interval = new Interval( + Interval interval = Intervals.of( "2011-04-01/2011-04-02" ); Assert.assertTrue( @@ -281,7 +282,7 @@ public class CoordinatorBasedSegmentHandoffNotifierTest Lists.newArrayList( new ImmutableSegmentLoadInfo( createSegment( - new Interval( + 
Intervals.of( "2011-04-01/2011-04-02" ), "v1", 1 ), @@ -289,7 +290,7 @@ public class CoordinatorBasedSegmentHandoffNotifierTest ) ), new SegmentDescriptor( - new Interval( + Intervals.of( "2011-04-01/2011-04-03" ), "v1", 1 ) @@ -301,7 +302,7 @@ public class CoordinatorBasedSegmentHandoffNotifierTest Lists.newArrayList( new ImmutableSegmentLoadInfo( createSegment( - new Interval( + Intervals.of( "2011-04-01/2011-04-04" ), "v1", 1 ), @@ -309,7 +310,7 @@ public class CoordinatorBasedSegmentHandoffNotifierTest ) ), new SegmentDescriptor( - new Interval( + Intervals.of( "2011-04-02/2011-04-03" ), "v1", 1 ) diff --git a/server/src/test/java/io/druid/segment/realtime/plumber/IntervalStartVersioningPolicyTest.java b/server/src/test/java/io/druid/segment/realtime/plumber/IntervalStartVersioningPolicyTest.java index 74feccfb9e1..1a3d8ac65ad 100644 --- a/server/src/test/java/io/druid/segment/realtime/plumber/IntervalStartVersioningPolicyTest.java +++ b/server/src/test/java/io/druid/segment/realtime/plumber/IntervalStartVersioningPolicyTest.java @@ -19,7 +19,7 @@ package io.druid.segment.realtime.plumber; -import org.joda.time.Interval; +import io.druid.java.util.common.Intervals; import org.junit.Assert; import org.junit.Test; @@ -31,7 +31,7 @@ public class IntervalStartVersioningPolicyTest public void testGetVersion() throws Exception { IntervalStartVersioningPolicy policy = new IntervalStartVersioningPolicy(); - String version = policy.getVersion(new Interval("2013-01-01/2013-01-02")); + String version = policy.getVersion(Intervals.of("2013-01-01/2013-01-02")); Assert.assertEquals("2013-01-01T00:00:00.000Z", version); } } diff --git a/server/src/test/java/io/druid/segment/realtime/plumber/MessageTimeRejectionPolicyFactoryTest.java b/server/src/test/java/io/druid/segment/realtime/plumber/MessageTimeRejectionPolicyFactoryTest.java index 8fa35b7b12d..f4dfc672346 100644 --- a/server/src/test/java/io/druid/segment/realtime/plumber/MessageTimeRejectionPolicyFactoryTest.java +++ b/server/src/test/java/io/druid/segment/realtime/plumber/MessageTimeRejectionPolicyFactoryTest.java @@ -19,6 +19,7 @@ package io.druid.segment.realtime.plumber; +import io.druid.java.util.common.DateTimes; import org.joda.time.DateTime; import org.joda.time.Period; import org.junit.Assert; @@ -34,7 +35,7 @@ public class MessageTimeRejectionPolicyFactoryTest Period period = new Period("PT10M"); RejectionPolicy rejectionPolicy = new MessageTimeRejectionPolicyFactory().create(period); - DateTime now = new DateTime(); + DateTime now = DateTimes.nowUtc(); DateTime past = now.minus(period).minus(1); DateTime future = now.plus(period).plus(1); diff --git a/server/src/test/java/io/druid/segment/realtime/plumber/RealtimePlumberSchoolTest.java b/server/src/test/java/io/druid/segment/realtime/plumber/RealtimePlumberSchoolTest.java index 640f16b1188..414dd897738 100644 --- a/server/src/test/java/io/druid/segment/realtime/plumber/RealtimePlumberSchoolTest.java +++ b/server/src/test/java/io/druid/segment/realtime/plumber/RealtimePlumberSchoolTest.java @@ -36,6 +36,8 @@ import io.druid.data.input.impl.JSONParseSpec; import io.druid.data.input.impl.StringInputRowParser; import io.druid.data.input.impl.TimestampSpec; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.granularity.Granularities; import io.druid.query.DefaultQueryRunnerFactoryConglomerate; import io.druid.query.Query; @@ -256,10 +258,10 @@ public class 
RealtimePlumberSchoolTest .put( 0L, new Sink( - new Interval(0, TimeUnit.HOURS.toMillis(1)), + Intervals.utc(0, TimeUnit.HOURS.toMillis(1)), schema, tuningConfig.getShardSpec(), - new DateTime("2014-12-01T12:34:56.789").toString(), + DateTimes.of("2014-12-01T12:34:56.789").toString(), tuningConfig.getMaxRowsInMemory(), tuningConfig.isReportParseExceptions() ) @@ -303,10 +305,10 @@ public class RealtimePlumberSchoolTest .put( 0L, new Sink( - new Interval(0, TimeUnit.HOURS.toMillis(1)), + Intervals.utc(0, TimeUnit.HOURS.toMillis(1)), schema, tuningConfig.getShardSpec(), - new DateTime("2014-12-01T12:34:56.789").toString(), + DateTimes.of("2014-12-01T12:34:56.789").toString(), tuningConfig.getMaxRowsInMemory(), tuningConfig.isReportParseExceptions() ) @@ -353,7 +355,7 @@ public class RealtimePlumberSchoolTest private void testPersistHydrantGapsHelper(final Object commitMetadata) throws Exception { - Interval testInterval = new Interval(new DateTime("1970-01-01"), new DateTime("1971-01-01")); + Interval testInterval = new Interval(DateTimes.of("1970-01-01"), DateTimes.of("1971-01-01")); RealtimePlumber plumber2 = (RealtimePlumber) realtimePlumberSchool.findPlumber(schema2, tuningConfig, metrics); plumber2.getSinks() @@ -363,7 +365,7 @@ public class RealtimePlumberSchoolTest testInterval, schema2, tuningConfig.getShardSpec(), - new DateTime("2014-12-01T12:34:56.789").toString(), + DateTimes.of("2014-12-01T12:34:56.789").toString(), tuningConfig.getMaxRowsInMemory(), tuningConfig.isReportParseExceptions() ) @@ -418,8 +420,8 @@ public class RealtimePlumberSchoolTest List hydrants = Lists.newArrayList(sinks.get(new Long(0))); - DateTime startTime = new DateTime("1970-01-01T00:00:00.000Z"); - Interval expectedInterval = new Interval(startTime, new DateTime("1971-01-01T00:00:00.000Z")); + DateTime startTime = DateTimes.of("1970-01-01T00:00:00.000Z"); + Interval expectedInterval = new Interval(startTime, DateTimes.of("1971-01-01T00:00:00.000Z")); Assert.assertEquals(0, hydrants.get(0).getCount()); Assert.assertEquals( expectedInterval, @@ -580,13 +582,13 @@ public class RealtimePlumberSchoolTest @Override public long getTimestampFromEpoch() { - return new DateTime(timeStr).getMillis(); + return DateTimes.of(timeStr).getMillis(); } @Override public DateTime getTimestamp() { - return new DateTime(timeStr); + return DateTimes.of(timeStr); } @Override @@ -640,13 +642,13 @@ public class RealtimePlumberSchoolTest @Override public long getTimestampFromEpoch() { - return new DateTime(timeStr).getMillis(); + return DateTimes.of(timeStr).getMillis(); } @Override public DateTime getTimestamp() { - return new DateTime(timeStr); + return DateTimes.of(timeStr); } @Override diff --git a/server/src/test/java/io/druid/segment/realtime/plumber/ServerTimeRejectionPolicyFactoryTest.java b/server/src/test/java/io/druid/segment/realtime/plumber/ServerTimeRejectionPolicyFactoryTest.java index c7324f64306..7eab7ed587a 100644 --- a/server/src/test/java/io/druid/segment/realtime/plumber/ServerTimeRejectionPolicyFactoryTest.java +++ b/server/src/test/java/io/druid/segment/realtime/plumber/ServerTimeRejectionPolicyFactoryTest.java @@ -19,6 +19,7 @@ package io.druid.segment.realtime.plumber; +import io.druid.java.util.common.DateTimes; import org.joda.time.DateTime; import org.joda.time.Period; import org.junit.Assert; @@ -35,7 +36,7 @@ public class ServerTimeRejectionPolicyFactoryTest RejectionPolicy rejectionPolicy = new ServerTimeRejectionPolicyFactory().create(period); - DateTime now = new DateTime(); + DateTime now = 
DateTimes.nowUtc(); DateTime past = now.minus(period).minus(100); DateTime future = now.plus(period).plus(100); diff --git a/server/src/test/java/io/druid/segment/realtime/plumber/SinkTest.java b/server/src/test/java/io/druid/segment/realtime/plumber/SinkTest.java index c93323da67e..b84d154b2f0 100644 --- a/server/src/test/java/io/druid/segment/realtime/plumber/SinkTest.java +++ b/server/src/test/java/io/druid/segment/realtime/plumber/SinkTest.java @@ -24,6 +24,8 @@ import com.google.common.collect.Lists; import io.druid.data.input.InputRow; import io.druid.data.input.Row; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.granularity.Granularities; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.CountAggregatorFactory; @@ -54,8 +56,8 @@ public class SinkTest new DefaultObjectMapper() ); - final Interval interval = new Interval("2013-01-01/2013-01-02"); - final String version = new DateTime().toString(); + final Interval interval = Intervals.of("2013-01-01/2013-01-02"); + final String version = DateTimes.nowUtc().toString(); RealtimeTuningConfig tuningConfig = new RealtimeTuningConfig( 100, new Period("P1Y"), @@ -94,13 +96,13 @@ public class SinkTest @Override public long getTimestampFromEpoch() { - return new DateTime("2013-01-01").getMillis(); + return DateTimes.of("2013-01-01").getMillis(); } @Override public DateTime getTimestamp() { - return new DateTime("2013-01-01"); + return DateTimes.of("2013-01-01"); } @Override @@ -142,7 +144,7 @@ public class SinkTest ); FireHydrant currHydrant = sink.getCurrHydrant(); - Assert.assertEquals(new Interval("2013-01-01/PT1M"), currHydrant.getIndex().getInterval()); + Assert.assertEquals(Intervals.of("2013-01-01/PT1M"), currHydrant.getIndex().getInterval()); FireHydrant swapHydrant = sink.swap(); @@ -159,13 +161,13 @@ public class SinkTest @Override public long getTimestampFromEpoch() { - return new DateTime("2013-01-01").getMillis(); + return DateTimes.of("2013-01-01").getMillis(); } @Override public DateTime getTimestamp() { - return new DateTime("2013-01-01"); + return DateTimes.of("2013-01-01"); } @Override @@ -208,7 +210,7 @@ public class SinkTest Assert.assertEquals(currHydrant, swapHydrant); Assert.assertNotSame(currHydrant, sink.getCurrHydrant()); - Assert.assertEquals(new Interval("2013-01-01/PT1M"), sink.getCurrHydrant().getIndex().getInterval()); + Assert.assertEquals(Intervals.of("2013-01-01/PT1M"), sink.getCurrHydrant().getIndex().getInterval()); Assert.assertEquals(2, Iterators.size(sink.iterator())); } diff --git a/server/src/test/java/io/druid/server/ClientInfoResourceTest.java b/server/src/test/java/io/druid/server/ClientInfoResourceTest.java index aacf8a3ac94..a397c749171 100644 --- a/server/src/test/java/io/druid/server/ClientInfoResourceTest.java +++ b/server/src/test/java/io/druid/server/ClientInfoResourceTest.java @@ -46,6 +46,7 @@ import io.druid.client.TimelineServerView; import io.druid.client.selector.HighestPriorityTierSelectorStrategy; import io.druid.client.selector.RandomServerSelectorStrategy; import io.druid.client.selector.ServerSelector; +import io.druid.java.util.common.Intervals; import io.druid.query.TableDataSource; import io.druid.query.metadata.SegmentMetadataQueryConfig; import io.druid.server.coordination.ServerType; @@ -57,7 +58,7 @@ import io.druid.timeline.partition.ShardSpec; import io.druid.timeline.partition.SingleElementPartitionChunk; import 
org.easymock.EasyMock; import org.joda.time.DateTime; -import org.joda.time.Interval; +import org.joda.time.chrono.ISOChronology; import org.junit.Assert; import org.junit.Before; import org.junit.Test; @@ -69,7 +70,7 @@ public class ClientInfoResourceTest { private static final String KEY_DIMENSIONS = "dimensions"; private static final String KEY_METRICS = "metrics"; - private static final DateTime FIXED_TEST_TIME = new DateTime(2015, 9, 14, 0, 0); /* always use the same current time for unit tests */ + private static final DateTime FIXED_TEST_TIME = new DateTime(2015, 9, 14, 0, 0, ISOChronology.getInstanceUTC()); /* always use the same current time for unit tests */ private final String dataSource = "test-data-source"; @@ -373,7 +374,7 @@ public class ClientInfoResourceTest { DataSegment segment = DataSegment.builder() .dataSource(dataSource) - .interval(new Interval(interval)) + .interval(Intervals.of(interval)) .version(version) .dimensions(dims) .metrics(metrics) @@ -381,7 +382,7 @@ public class ClientInfoResourceTest .build(); server.addDataSegment(segment.getIdentifier(), segment); ServerSelector ss = new ServerSelector(segment, new HighestPriorityTierSelectorStrategy(new RandomServerSelectorStrategy())); - timeline.add(new Interval(interval), version, new SingleElementPartitionChunk(ss)); + timeline.add(Intervals.of(interval), version, new SingleElementPartitionChunk(ss)); } private void addSegmentWithShardSpec( @@ -396,7 +397,7 @@ public class ClientInfoResourceTest { DataSegment segment = DataSegment.builder() .dataSource(dataSource) - .interval(new Interval(interval)) + .interval(Intervals.of(interval)) .version(version) .dimensions(dims) .metrics(metrics) @@ -405,7 +406,7 @@ public class ClientInfoResourceTest .build(); server.addDataSegment(segment.getIdentifier(), segment); ServerSelector ss = new ServerSelector(segment, new HighestPriorityTierSelectorStrategy(new RandomServerSelectorStrategy())); - timeline.add(new Interval(interval), version, shardSpec.createChunk(ss)); + timeline.add(Intervals.of(interval), version, shardSpec.createChunk(ss)); } private ClientInfoResource getResourceTestHelper( diff --git a/server/src/test/java/io/druid/server/SegmentManagerTest.java b/server/src/test/java/io/druid/server/SegmentManagerTest.java index 3176b00368a..ef6aab8b55d 100644 --- a/server/src/test/java/io/druid/server/SegmentManagerTest.java +++ b/server/src/test/java/io/druid/server/SegmentManagerTest.java @@ -23,6 +23,7 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.google.common.collect.Ordering; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.MapUtils; import io.druid.segment.AbstractSegment; import io.druid.segment.QueryableIndex; @@ -148,9 +149,9 @@ public class SegmentManagerTest private static final List segments = ImmutableList.of( new DataSegment( "small_source", - new Interval("0/1000"), + Intervals.of("0/1000"), "0", - ImmutableMap.of("interval", new Interval("0/1000"), "version", 0), + ImmutableMap.of("interval", Intervals.of("0/1000"), "version", 0), Lists.newArrayList(), Lists.newArrayList(), NoneShardSpec.instance(), @@ -159,9 +160,9 @@ public class SegmentManagerTest ), new DataSegment( "small_source", - new Interval("1000/2000"), + Intervals.of("1000/2000"), "0", - ImmutableMap.of("interval", new Interval("1000/2000"), "version", 0), + ImmutableMap.of("interval", Intervals.of("1000/2000"), "version", 0), Lists.newArrayList(), 
Lists.newArrayList(), NoneShardSpec.instance(), @@ -170,9 +171,9 @@ public class SegmentManagerTest ), new DataSegment( "large_source", - new Interval("0/1000"), + Intervals.of("0/1000"), "0", - ImmutableMap.of("interval", new Interval("0/1000"), "version", 0), + ImmutableMap.of("interval", Intervals.of("0/1000"), "version", 0), Lists.newArrayList(), Lists.newArrayList(), NoneShardSpec.instance(), @@ -181,9 +182,9 @@ public class SegmentManagerTest ), new DataSegment( "large_source", - new Interval("1000/2000"), + Intervals.of("1000/2000"), "0", - ImmutableMap.of("interval", new Interval("1000/2000"), "version", 0), + ImmutableMap.of("interval", Intervals.of("1000/2000"), "version", 0), Lists.newArrayList(), Lists.newArrayList(), NoneShardSpec.instance(), @@ -193,9 +194,9 @@ public class SegmentManagerTest // overshadowing the ahead segment new DataSegment( "large_source", - new Interval("1000/2000"), + Intervals.of("1000/2000"), "1", - ImmutableMap.of("interval", new Interval("1000/2000"), "version", 1), + ImmutableMap.of("interval", Intervals.of("1000/2000"), "version", 1), Lists.newArrayList(), Lists.newArrayList(), NoneShardSpec.instance(), diff --git a/server/src/test/java/io/druid/server/audit/SQLAuditManagerTest.java b/server/src/test/java/io/druid/server/audit/SQLAuditManagerTest.java index f0edf608751..896ada8780e 100644 --- a/server/src/test/java/io/druid/server/audit/SQLAuditManagerTest.java +++ b/server/src/test/java/io/druid/server/audit/SQLAuditManagerTest.java @@ -24,11 +24,11 @@ import io.druid.audit.AuditEntry; import io.druid.audit.AuditInfo; import io.druid.audit.AuditManager; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.StringUtils; import io.druid.metadata.TestDerbyConnector; import io.druid.server.metrics.NoopServiceEmitter; -import org.joda.time.DateTime; -import org.joda.time.Interval; import org.junit.After; import org.junit.Assert; import org.junit.Before; @@ -76,7 +76,7 @@ public class SQLAuditManagerTest "127.0.0.1" ), "testPayload", - new DateTime("2013-01-01T00:00:00Z") + DateTimes.of("2013-01-01T00:00:00Z") ); ObjectMapper mapper = new DefaultObjectMapper(); AuditEntry serde = mapper.readValue(mapper.writeValueAsString(entry), AuditEntry.class); @@ -95,7 +95,7 @@ public class SQLAuditManagerTest "127.0.0.1" ), "testPayload", - new DateTime("2013-01-01T00:00:00Z") + DateTimes.of("2013-01-01T00:00:00Z") ); auditManager.doAudit(entry); byte[] payload = connector.lookup( @@ -121,16 +121,14 @@ public class SQLAuditManagerTest "127.0.0.1" ), "testPayload", - new DateTime("2013-01-01T00:00:00Z") + DateTimes.of("2013-01-01T00:00:00Z") ); auditManager.doAudit(entry); auditManager.doAudit(entry); List auditEntries = auditManager.fetchAuditHistory( "testKey", "testType", - new Interval( - "2012-01-01T00:00:00Z/2013-01-03T00:00:00Z" - ) + Intervals.of("2012-01-01T00:00:00Z/2013-01-03T00:00:00Z") ); Assert.assertEquals(2, auditEntries.size()); Assert.assertEquals(entry, auditEntries.get(0)); @@ -149,7 +147,7 @@ public class SQLAuditManagerTest "127.0.0.1" ), "testPayload", - new DateTime("2013-01-01T00:00:00Z") + DateTimes.of("2013-01-01T00:00:00Z") ); AuditEntry entry2 = new AuditEntry( "testKey2", @@ -160,7 +158,7 @@ public class SQLAuditManagerTest "127.0.0.1" ), "testPayload", - new DateTime("2013-01-02T00:00:00Z") + DateTimes.of("2013-01-02T00:00:00Z") ); auditManager.doAudit(entry1); auditManager.doAudit(entry2); @@ -185,7 +183,7 @@ public class 
SQLAuditManagerTest "127.0.0.1" ), "testPayload", - new DateTime("2013-01-01T00:00:00Z") + DateTimes.of("2013-01-01T00:00:00Z") ); AuditEntry entry2 = new AuditEntry( "testKey", @@ -196,7 +194,7 @@ public class SQLAuditManagerTest "127.0.0.1" ), "testPayload", - new DateTime("2013-01-02T00:00:00Z") + DateTimes.of("2013-01-02T00:00:00Z") ); AuditEntry entry3 = new AuditEntry( "testKey", @@ -207,7 +205,7 @@ public class SQLAuditManagerTest "127.0.0.1" ), "testPayload", - new DateTime("2013-01-03T00:00:00Z") + DateTimes.of("2013-01-03T00:00:00Z") ); auditManager.doAudit(entry1); auditManager.doAudit(entry2); diff --git a/server/src/test/java/io/druid/server/coordination/SegmentChangeRequestDropTest.java b/server/src/test/java/io/druid/server/coordination/SegmentChangeRequestDropTest.java index 76c59048c1a..82e365cd2e5 100644 --- a/server/src/test/java/io/druid/server/coordination/SegmentChangeRequestDropTest.java +++ b/server/src/test/java/io/druid/server/coordination/SegmentChangeRequestDropTest.java @@ -23,6 +23,7 @@ import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableMap; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Intervals; import io.druid.segment.IndexIO; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; @@ -42,7 +43,7 @@ public class SegmentChangeRequestDropTest { ObjectMapper mapper = new DefaultObjectMapper(); - final Interval interval = new Interval("2011-10-01/2011-10-02"); + final Interval interval = Intervals.of("2011-10-01/2011-10-02"); final ImmutableMap loadSpec = ImmutableMap.of("something", "or_other"); DataSegment segment = new DataSegment( diff --git a/server/src/test/java/io/druid/server/coordination/SegmentChangeRequestLoadTest.java b/server/src/test/java/io/druid/server/coordination/SegmentChangeRequestLoadTest.java index ac0340f873c..965ab1f25ba 100644 --- a/server/src/test/java/io/druid/server/coordination/SegmentChangeRequestLoadTest.java +++ b/server/src/test/java/io/druid/server/coordination/SegmentChangeRequestLoadTest.java @@ -23,6 +23,7 @@ import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableMap; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Intervals; import io.druid.segment.IndexIO; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; @@ -42,7 +43,7 @@ public class SegmentChangeRequestLoadTest { ObjectMapper mapper = new DefaultObjectMapper(); - final Interval interval = new Interval("2011-10-01/2011-10-02"); + final Interval interval = Intervals.of("2011-10-01/2011-10-02"); final ImmutableMap loadSpec = ImmutableMap.of("something", "or_other"); DataSegment segment = new DataSegment( diff --git a/server/src/test/java/io/druid/server/coordination/ServerManagerTest.java b/server/src/test/java/io/druid/server/coordination/ServerManagerTest.java index 9412232afcc..33cf26e9dd9 100644 --- a/server/src/test/java/io/druid/server/coordination/ServerManagerTest.java +++ b/server/src/test/java/io/druid/server/coordination/ServerManagerTest.java @@ -32,6 +32,7 @@ import io.druid.client.cache.CacheConfig; import io.druid.client.cache.LocalCacheProvider; import io.druid.jackson.DefaultObjectMapper; import io.druid.java.util.common.IAE; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.MapUtils; import 
io.druid.java.util.common.Pair; import io.druid.java.util.common.granularity.Granularities; @@ -157,19 +158,19 @@ public class ServerManagerTest segmentManager ); - loadQueryable("test", "1", new Interval("P1d/2011-04-01")); - loadQueryable("test", "1", new Interval("P1d/2011-04-02")); - loadQueryable("test", "2", new Interval("P1d/2011-04-02")); - loadQueryable("test", "1", new Interval("P1d/2011-04-03")); - loadQueryable("test", "1", new Interval("P1d/2011-04-04")); - loadQueryable("test", "1", new Interval("P1d/2011-04-05")); - loadQueryable("test", "2", new Interval("PT1h/2011-04-04T01")); - loadQueryable("test", "2", new Interval("PT1h/2011-04-04T02")); - loadQueryable("test", "2", new Interval("PT1h/2011-04-04T03")); - loadQueryable("test", "2", new Interval("PT1h/2011-04-04T05")); - loadQueryable("test", "2", new Interval("PT1h/2011-04-04T06")); - loadQueryable("test2", "1", new Interval("P1d/2011-04-01")); - loadQueryable("test2", "1", new Interval("P1d/2011-04-02")); + loadQueryable("test", "1", Intervals.of("P1d/2011-04-01")); + loadQueryable("test", "1", Intervals.of("P1d/2011-04-02")); + loadQueryable("test", "2", Intervals.of("P1d/2011-04-02")); + loadQueryable("test", "1", Intervals.of("P1d/2011-04-03")); + loadQueryable("test", "1", Intervals.of("P1d/2011-04-04")); + loadQueryable("test", "1", Intervals.of("P1d/2011-04-05")); + loadQueryable("test", "2", Intervals.of("PT1h/2011-04-04T01")); + loadQueryable("test", "2", Intervals.of("PT1h/2011-04-04T02")); + loadQueryable("test", "2", Intervals.of("PT1h/2011-04-04T03")); + loadQueryable("test", "2", Intervals.of("PT1h/2011-04-04T05")); + loadQueryable("test", "2", Intervals.of("PT1h/2011-04-04T06")); + loadQueryable("test2", "1", Intervals.of("P1d/2011-04-01")); + loadQueryable("test2", "1", Intervals.of("P1d/2011-04-02")); } @Test @@ -178,9 +179,9 @@ public class ServerManagerTest Future future = assertQueryable( Granularities.DAY, "test", - new Interval("P1d/2011-04-01"), + Intervals.of("P1d/2011-04-01"), ImmutableList.>of( - new Pair("1", new Interval("P1d/2011-04-01")) + new Pair("1", Intervals.of("P1d/2011-04-01")) ) ); waitForTestVerificationAndCleanup(future); @@ -188,10 +189,10 @@ public class ServerManagerTest future = assertQueryable( Granularities.DAY, - "test", new Interval("P2d/2011-04-02"), + "test", Intervals.of("P2d/2011-04-02"), ImmutableList.>of( - new Pair("1", new Interval("P1d/2011-04-01")), - new Pair("2", new Interval("P1d/2011-04-02")) + new Pair("1", Intervals.of("P1d/2011-04-01")), + new Pair("2", Intervals.of("P1d/2011-04-02")) ) ); waitForTestVerificationAndCleanup(future); @@ -201,7 +202,7 @@ public class ServerManagerTest public void testDelete1() throws Exception { final String dataSouce = "test"; - final Interval interval = new Interval("2011-04-01/2011-04-02"); + final Interval interval = Intervals.of("2011-04-01/2011-04-02"); Future future = assertQueryable( Granularities.DAY, @@ -226,50 +227,50 @@ public class ServerManagerTest @Test public void testDelete2() throws Exception { - loadQueryable("test", "3", new Interval("2011-04-04/2011-04-05")); + loadQueryable("test", "3", Intervals.of("2011-04-04/2011-04-05")); Future future = assertQueryable( Granularities.DAY, - "test", new Interval("2011-04-04/2011-04-06"), + "test", Intervals.of("2011-04-04/2011-04-06"), ImmutableList.>of( - new Pair("3", new Interval("2011-04-04/2011-04-05")) + new Pair("3", Intervals.of("2011-04-04/2011-04-05")) ) ); waitForTestVerificationAndCleanup(future); - dropQueryable("test", "3", new 
Interval("2011-04-04/2011-04-05")); - dropQueryable("test", "1", new Interval("2011-04-04/2011-04-05")); + dropQueryable("test", "3", Intervals.of("2011-04-04/2011-04-05")); + dropQueryable("test", "1", Intervals.of("2011-04-04/2011-04-05")); future = assertQueryable( Granularities.HOUR, - "test", new Interval("2011-04-04/2011-04-04T06"), + "test", Intervals.of("2011-04-04/2011-04-04T06"), ImmutableList.>of( - new Pair("2", new Interval("2011-04-04T00/2011-04-04T01")), - new Pair("2", new Interval("2011-04-04T01/2011-04-04T02")), - new Pair("2", new Interval("2011-04-04T02/2011-04-04T03")), - new Pair("2", new Interval("2011-04-04T04/2011-04-04T05")), - new Pair("2", new Interval("2011-04-04T05/2011-04-04T06")) + new Pair("2", Intervals.of("2011-04-04T00/2011-04-04T01")), + new Pair("2", Intervals.of("2011-04-04T01/2011-04-04T02")), + new Pair("2", Intervals.of("2011-04-04T02/2011-04-04T03")), + new Pair("2", Intervals.of("2011-04-04T04/2011-04-04T05")), + new Pair("2", Intervals.of("2011-04-04T05/2011-04-04T06")) ) ); waitForTestVerificationAndCleanup(future); future = assertQueryable( Granularities.HOUR, - "test", new Interval("2011-04-04/2011-04-04T03"), + "test", Intervals.of("2011-04-04/2011-04-04T03"), ImmutableList.>of( - new Pair("2", new Interval("2011-04-04T00/2011-04-04T01")), - new Pair("2", new Interval("2011-04-04T01/2011-04-04T02")), - new Pair("2", new Interval("2011-04-04T02/2011-04-04T03")) + new Pair("2", Intervals.of("2011-04-04T00/2011-04-04T01")), + new Pair("2", Intervals.of("2011-04-04T01/2011-04-04T02")), + new Pair("2", Intervals.of("2011-04-04T02/2011-04-04T03")) ) ); waitForTestVerificationAndCleanup(future); future = assertQueryable( Granularities.HOUR, - "test", new Interval("2011-04-04T04/2011-04-04T06"), + "test", Intervals.of("2011-04-04T04/2011-04-04T06"), ImmutableList.>of( - new Pair("2", new Interval("2011-04-04T04/2011-04-04T05")), - new Pair("2", new Interval("2011-04-04T05/2011-04-04T06")) + new Pair("2", Intervals.of("2011-04-04T04/2011-04-04T05")), + new Pair("2", Intervals.of("2011-04-04T05/2011-04-04T06")) ) ); waitForTestVerificationAndCleanup(future); @@ -278,13 +279,13 @@ public class ServerManagerTest @Test public void testReferenceCounting() throws Exception { - loadQueryable("test", "3", new Interval("2011-04-04/2011-04-05")); + loadQueryable("test", "3", Intervals.of("2011-04-04/2011-04-05")); Future future = assertQueryable( Granularities.DAY, - "test", new Interval("2011-04-04/2011-04-06"), + "test", Intervals.of("2011-04-04/2011-04-06"), ImmutableList.>of( - new Pair("3", new Interval("2011-04-04/2011-04-05")) + new Pair("3", Intervals.of("2011-04-04/2011-04-05")) ) ); @@ -307,7 +308,7 @@ public class ServerManagerTest queryWaitLatch.countDown(); future.get(); - dropQueryable("test", "3", new Interval("2011-04-04/2011-04-05")); + dropQueryable("test", "3", Intervals.of("2011-04-04/2011-04-05")); for (SegmentForTesting segmentForTesting : factory.getAdapters()) { Assert.assertTrue(segmentForTesting.isClosed()); @@ -317,13 +318,13 @@ public class ServerManagerTest @Test public void testReferenceCountingWhileQueryExecuting() throws Exception { - loadQueryable("test", "3", new Interval("2011-04-04/2011-04-05")); + loadQueryable("test", "3", Intervals.of("2011-04-04/2011-04-05")); Future future = assertQueryable( Granularities.DAY, - "test", new Interval("2011-04-04/2011-04-06"), + "test", Intervals.of("2011-04-04/2011-04-06"), ImmutableList.>of( - new Pair("3", new Interval("2011-04-04/2011-04-05")) + new Pair("3", 
Intervals.of("2011-04-04/2011-04-05")) ) ); @@ -343,7 +344,7 @@ public class ServerManagerTest Assert.assertFalse(segmentForTesting.isClosed()); } - dropQueryable("test", "3", new Interval("2011-04-04/2011-04-05")); + dropQueryable("test", "3", Intervals.of("2011-04-04/2011-04-05")); for (SegmentForTesting segmentForTesting : factory.getAdapters()) { Assert.assertFalse(segmentForTesting.isClosed()); @@ -360,13 +361,13 @@ public class ServerManagerTest @Test public void testMultipleDrops() throws Exception { - loadQueryable("test", "3", new Interval("2011-04-04/2011-04-05")); + loadQueryable("test", "3", Intervals.of("2011-04-04/2011-04-05")); Future future = assertQueryable( Granularities.DAY, - "test", new Interval("2011-04-04/2011-04-06"), + "test", Intervals.of("2011-04-04/2011-04-06"), ImmutableList.>of( - new Pair("3", new Interval("2011-04-04/2011-04-05")) + new Pair("3", Intervals.of("2011-04-04/2011-04-05")) ) ); @@ -386,8 +387,8 @@ public class ServerManagerTest Assert.assertFalse(segmentForTesting.isClosed()); } - dropQueryable("test", "3", new Interval("2011-04-04/2011-04-05")); - dropQueryable("test", "3", new Interval("2011-04-04/2011-04-05")); + dropQueryable("test", "3", Intervals.of("2011-04-04/2011-04-05")); + dropQueryable("test", "3", Intervals.of("2011-04-04/2011-04-05")); for (SegmentForTesting segmentForTesting : factory.getAdapters()) { Assert.assertFalse(segmentForTesting.isClosed()); diff --git a/server/src/test/java/io/druid/server/coordination/ZkCoordinatorTest.java b/server/src/test/java/io/druid/server/coordination/ZkCoordinatorTest.java index a1456819540..dc14592b820 100644 --- a/server/src/test/java/io/druid/server/coordination/ZkCoordinatorTest.java +++ b/server/src/test/java/io/druid/server/coordination/ZkCoordinatorTest.java @@ -35,6 +35,7 @@ import io.druid.concurrent.Execs; import io.druid.curator.CuratorTestBase; import io.druid.curator.announcement.Announcer; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.concurrent.ScheduledExecutorFactory; import io.druid.java.util.common.concurrent.ScheduledExecutors; import io.druid.java.util.common.lifecycle.Lifecycle; @@ -269,7 +270,7 @@ public class ZkCoordinatorTest extends CuratorTestBase { zkCoordinator.start(); - final DataSegment segment = makeSegment("test", "1", new Interval("P1d/2011-04-01")); + final DataSegment segment = makeSegment("test", "1", Intervals.of("P1d/2011-04-01")); zkCoordinator.removeSegment(segment, new DataSegmentChangeCallback() { @@ -318,7 +319,7 @@ public class ZkCoordinatorTest extends CuratorTestBase { zkCoordinator.start(); - final DataSegment segment = makeSegment("test", "1", new Interval("P1d/2011-04-01")); + final DataSegment segment = makeSegment("test", "1", Intervals.of("P1d/2011-04-01")); zkCoordinator.addSegment(segment, new DataSegmentChangeCallback() { @@ -371,19 +372,19 @@ public class ZkCoordinatorTest extends CuratorTestBase { List segments = Lists.newLinkedList(); for (int i = 0; i < COUNT; ++i) { - segments.add(makeSegment("test" + i, "1", new Interval("P1d/2011-04-01"))); - segments.add(makeSegment("test" + i, "1", new Interval("P1d/2011-04-02"))); - segments.add(makeSegment("test" + i, "2", new Interval("P1d/2011-04-02"))); - segments.add(makeSegment("test" + i, "1", new Interval("P1d/2011-04-03"))); - segments.add(makeSegment("test" + i, "1", new Interval("P1d/2011-04-04"))); - segments.add(makeSegment("test" + i, "1", new Interval("P1d/2011-04-05"))); - segments.add(makeSegment("test" + 
i, "2", new Interval("PT1h/2011-04-04T01"))); - segments.add(makeSegment("test" + i, "2", new Interval("PT1h/2011-04-04T02"))); - segments.add(makeSegment("test" + i, "2", new Interval("PT1h/2011-04-04T03"))); - segments.add(makeSegment("test" + i, "2", new Interval("PT1h/2011-04-04T05"))); - segments.add(makeSegment("test" + i, "2", new Interval("PT1h/2011-04-04T06"))); - segments.add(makeSegment("test_two" + i, "1", new Interval("P1d/2011-04-01"))); - segments.add(makeSegment("test_two" + i, "1", new Interval("P1d/2011-04-02"))); + segments.add(makeSegment("test" + i, "1", Intervals.of("P1d/2011-04-01"))); + segments.add(makeSegment("test" + i, "1", Intervals.of("P1d/2011-04-02"))); + segments.add(makeSegment("test" + i, "2", Intervals.of("P1d/2011-04-02"))); + segments.add(makeSegment("test" + i, "1", Intervals.of("P1d/2011-04-03"))); + segments.add(makeSegment("test" + i, "1", Intervals.of("P1d/2011-04-04"))); + segments.add(makeSegment("test" + i, "1", Intervals.of("P1d/2011-04-05"))); + segments.add(makeSegment("test" + i, "2", Intervals.of("PT1h/2011-04-04T01"))); + segments.add(makeSegment("test" + i, "2", Intervals.of("PT1h/2011-04-04T02"))); + segments.add(makeSegment("test" + i, "2", Intervals.of("PT1h/2011-04-04T03"))); + segments.add(makeSegment("test" + i, "2", Intervals.of("PT1h/2011-04-04T05"))); + segments.add(makeSegment("test" + i, "2", Intervals.of("PT1h/2011-04-04T06"))); + segments.add(makeSegment("test_two" + i, "1", Intervals.of("P1d/2011-04-01"))); + segments.add(makeSegment("test_two" + i, "1", Intervals.of("P1d/2011-04-02"))); } Collections.sort(segments); @@ -532,11 +533,11 @@ public class ZkCoordinatorTest extends CuratorTestBase List segments = Lists.newLinkedList(); for (int i = 0; i < COUNT; ++i) { - segments.add(makeSegment("test" + i, "1", new Interval("P1d/2011-04-01"))); - segments.add(makeSegment("test" + i, "1", new Interval("P1d/2011-04-02"))); - segments.add(makeSegment("test" + i, "2", new Interval("P1d/2011-04-02"))); - segments.add(makeSegment("test_two" + i, "1", new Interval("P1d/2011-04-01"))); - segments.add(makeSegment("test_two" + i, "1", new Interval("P1d/2011-04-02"))); + segments.add(makeSegment("test" + i, "1", Intervals.of("P1d/2011-04-01"))); + segments.add(makeSegment("test" + i, "1", Intervals.of("P1d/2011-04-02"))); + segments.add(makeSegment("test" + i, "2", Intervals.of("P1d/2011-04-02"))); + segments.add(makeSegment("test_two" + i, "1", Intervals.of("P1d/2011-04-01"))); + segments.add(makeSegment("test_two" + i, "1", Intervals.of("P1d/2011-04-02"))); } Collections.sort(segments); diff --git a/server/src/test/java/io/druid/server/coordination/coordination/BatchDataSegmentAnnouncerTest.java b/server/src/test/java/io/druid/server/coordination/coordination/BatchDataSegmentAnnouncerTest.java index 6286cbaf650..736c13bf748 100644 --- a/server/src/test/java/io/druid/server/coordination/coordination/BatchDataSegmentAnnouncerTest.java +++ b/server/src/test/java/io/druid/server/coordination/coordination/BatchDataSegmentAnnouncerTest.java @@ -31,6 +31,7 @@ import com.google.common.util.concurrent.MoreExecutors; import io.druid.curator.PotentiallyGzippedCompressionProvider; import io.druid.curator.announcement.Announcer; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; import io.druid.server.coordination.BatchDataSegmentAnnouncer; import io.druid.server.coordination.DruidServerMetadata; import io.druid.server.coordination.SegmentChangeRequestHistory; @@ -43,7 +44,6 @@ import 
org.apache.curator.framework.CuratorFramework; import org.apache.curator.framework.CuratorFrameworkFactory; import org.apache.curator.retry.ExponentialBackoffRetry; import org.apache.curator.test.TestingCluster; -import org.joda.time.DateTime; import org.joda.time.Interval; import org.junit.After; import org.junit.Assert; @@ -353,11 +353,11 @@ public class BatchDataSegmentAnnouncerTest .dataSource("foo") .interval( new Interval( - new DateTime("2013-01-01").plusDays(offset), - new DateTime("2013-01-02").plusDays(offset) + DateTimes.of("2013-01-01").plusDays(offset), + DateTimes.of("2013-01-02").plusDays(offset) ) ) - .version(new DateTime().toString()) + .version(DateTimes.nowUtc().toString()) .dimensions(ImmutableList.of("dim1", "dim2")) .metrics(ImmutableList.of("met1", "met2")) .loadSpec(ImmutableMap.of("type", "local")) diff --git a/server/src/test/java/io/druid/server/coordinator/CostBalancerStrategyBenchmark.java b/server/src/test/java/io/druid/server/coordinator/CostBalancerStrategyBenchmark.java index 874b6a745f8..c592a7bc47e 100644 --- a/server/src/test/java/io/druid/server/coordinator/CostBalancerStrategyBenchmark.java +++ b/server/src/test/java/io/druid/server/coordinator/CostBalancerStrategyBenchmark.java @@ -22,6 +22,7 @@ package io.druid.server.coordinator; import com.carrotsearch.junitbenchmarks.AbstractBenchmark; import com.carrotsearch.junitbenchmarks.BenchmarkOptions; import com.google.common.util.concurrent.MoreExecutors; +import io.druid.java.util.common.Intervals; import io.druid.timeline.DataSegment; import org.joda.time.Interval; import org.junit.AfterClass; @@ -86,8 +87,8 @@ public class CostBalancerStrategyBenchmark extends AbstractBenchmark // Benchmark Joda Interval Gap impl vs CostBalancer.gapMillis - private final Interval interval1 = new Interval("2015-01-01T01:00:00Z/2015-01-01T02:00:00Z"); - private final Interval interval2 = new Interval("2015-02-01T01:00:00Z/2015-02-01T02:00:00Z"); + private final Interval interval1 = Intervals.of("2015-01-01T01:00:00Z/2015-01-01T02:00:00Z"); + private final Interval interval2 = Intervals.of("2015-02-01T01:00:00Z/2015-02-01T02:00:00Z"); volatile Long sum; @BenchmarkOptions(warmupRounds = 1000, benchmarkRounds = 1000000) diff --git a/server/src/test/java/io/druid/server/coordinator/CostBalancerStrategyTest.java b/server/src/test/java/io/druid/server/coordinator/CostBalancerStrategyTest.java index 521a33da504..781ddceffd1 100644 --- a/server/src/test/java/io/druid/server/coordinator/CostBalancerStrategyTest.java +++ b/server/src/test/java/io/druid/server/coordinator/CostBalancerStrategyTest.java @@ -25,6 +25,8 @@ import com.google.common.collect.Maps; import com.google.common.util.concurrent.MoreExecutors; import io.druid.client.ImmutableDruidDataSource; import io.druid.client.ImmutableDruidServer; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.server.coordination.DruidServerMetadata; import io.druid.server.coordination.ServerType; import io.druid.timeline.DataSegment; @@ -40,7 +42,7 @@ import java.util.concurrent.Executors; public class CostBalancerStrategyTest { - private static final Interval day = new Interval("2015-01-01T00/2015-01-01T01"); + private static final Interval day = Intervals.of("2015-01-01T00/2015-01-01T01"); /** * Create Druid cluster with serverCount servers having maxSegments segments each, and 1 server with 98 segment @@ -134,7 +136,7 @@ public class CostBalancerStrategyTest List serverHolderList = setupDummyCluster(10, 20); DataSegment segment 
= getSegment(1000); - final DateTime referenceTimestamp = new DateTime("2014-01-01"); + final DateTime referenceTimestamp = DateTimes.of("2014-01-01"); BalancerStrategy strategy = new CostBalancerStrategy( MoreExecutors.listeningDecorator(Executors.newFixedThreadPool(1)) ); @@ -146,7 +148,7 @@ public class CostBalancerStrategyTest @Test public void testComputeJointSegmentCost() { - DateTime referenceTime = new DateTime("2014-01-01T00:00:00"); + DateTime referenceTime = DateTimes.of("2014-01-01T00:00:00"); CostBalancerStrategy strategy = new CostBalancerStrategy( MoreExecutors.listeningDecorator(Executors.newFixedThreadPool(4)) ); diff --git a/server/src/test/java/io/druid/server/coordinator/DiskNormalizedCostBalancerStrategyTest.java b/server/src/test/java/io/druid/server/coordinator/DiskNormalizedCostBalancerStrategyTest.java index 1faf9f3eed8..b6e89f69d6b 100644 --- a/server/src/test/java/io/druid/server/coordinator/DiskNormalizedCostBalancerStrategyTest.java +++ b/server/src/test/java/io/druid/server/coordinator/DiskNormalizedCostBalancerStrategyTest.java @@ -25,6 +25,7 @@ import com.google.common.collect.Maps; import com.google.common.util.concurrent.MoreExecutors; import io.druid.client.ImmutableDruidDataSource; import io.druid.client.ImmutableDruidServer; +import io.druid.java.util.common.Intervals; import io.druid.server.coordination.DruidServerMetadata; import io.druid.server.coordination.ServerType; import io.druid.timeline.DataSegment; @@ -39,7 +40,7 @@ import java.util.concurrent.Executors; public class DiskNormalizedCostBalancerStrategyTest { - private static final Interval day = new Interval("2015-01-01T00/2015-01-01T01"); + private static final Interval day = Intervals.of("2015-01-01T00/2015-01-01T01"); /** * Create Druid cluster with serverCount servers having maxSegments segments each, and 1 server with 98 segment diff --git a/server/src/test/java/io/druid/server/coordinator/DruidClusterTest.java b/server/src/test/java/io/druid/server/coordinator/DruidClusterTest.java index 97b5dbb9906..18acacaa782 100644 --- a/server/src/test/java/io/druid/server/coordinator/DruidClusterTest.java +++ b/server/src/test/java/io/druid/server/coordinator/DruidClusterTest.java @@ -26,11 +26,11 @@ import com.google.common.collect.MinMaxPriorityQueue; import com.google.common.collect.Ordering; import io.druid.client.ImmutableDruidDataSource; import io.druid.client.ImmutableDruidServer; +import io.druid.java.util.common.Intervals; import io.druid.server.coordination.DruidServerMetadata; import io.druid.server.coordination.ServerType; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Before; import org.junit.Test; @@ -46,7 +46,7 @@ public class DruidClusterTest private static final List segments = ImmutableList.of( new DataSegment( "test", - new Interval("2015-04-12/2015-04-13"), + Intervals.of("2015-04-12/2015-04-13"), "1", ImmutableMap.of("containerName", "container1", "blobPath", "blobPath1"), null, @@ -57,7 +57,7 @@ public class DruidClusterTest ), new DataSegment( "test", - new Interval("2015-04-12/2015-04-13"), + Intervals.of("2015-04-12/2015-04-13"), "1", ImmutableMap.of("containerName", "container2", "blobPath", "blobPath2"), null, diff --git a/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorBalancerProfiler.java b/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorBalancerProfiler.java index da55a9dd017..8a67377377f 100644 --- 
a/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorBalancerProfiler.java +++ b/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorBalancerProfiler.java @@ -29,6 +29,7 @@ import com.metamx.emitter.EmittingLogger; import com.metamx.emitter.service.ServiceEmitter; import io.druid.client.DruidServer; import io.druid.client.ImmutableDruidServer; +import io.druid.java.util.common.DateTimes; import io.druid.metadata.MetadataRuleManager; import io.druid.server.coordinator.helper.DruidCoordinatorRuleRunner; import io.druid.server.coordinator.rules.PeriodLoadRule; @@ -36,7 +37,6 @@ import io.druid.server.coordinator.rules.Rule; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; import org.easymock.EasyMock; -import org.joda.time.DateTime; import org.joda.time.Interval; import org.joda.time.Period; import org.junit.Before; @@ -97,8 +97,8 @@ public class DruidCoordinatorBalancerProfiler "segment" + i, new DataSegment( "datasource" + i, - new Interval(new DateTime("2012-01-01"), (new DateTime("2012-01-01")).plusHours(1)), - (new DateTime("2012-03-01")).toString(), + new Interval(DateTimes.of("2012-01-01"), (DateTimes.of("2012-01-01")).plusHours(1)), + (DateTimes.of("2012-03-01")).toString(), Maps.newHashMap(), Lists.newArrayList(), Lists.newArrayList(), @@ -155,7 +155,7 @@ public class DruidCoordinatorBalancerProfiler .withReplicationThrottleLimit(5) .build() ) - .withBalancerReferenceTimestamp(new DateTime("2013-01-01")) + .withBalancerReferenceTimestamp(DateTimes.of("2013-01-01")) .withEmitter(emitter) .withDatabaseRuleManager(manager) .withReplicationManager(new ReplicationThrottler(2, 500)) @@ -245,7 +245,7 @@ public class DruidCoordinatorBalancerProfiler MAX_SEGMENTS_TO_MOVE ).build() ) - .withBalancerReferenceTimestamp(new DateTime("2013-01-01")) + .withBalancerReferenceTimestamp(DateTimes.of("2013-01-01")) .build(); DruidCoordinatorBalancerTester tester = new DruidCoordinatorBalancerTester(coordinator); watch.start(); diff --git a/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorBalancerTest.java b/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorBalancerTest.java index ea6ca8b7921..ea22bae7c4b 100644 --- a/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorBalancerTest.java +++ b/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorBalancerTest.java @@ -27,6 +27,7 @@ import com.google.common.collect.MinMaxPriorityQueue; import com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.MoreExecutors; import io.druid.client.ImmutableDruidServer; +import io.druid.java.util.common.DateTimes; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; import org.easymock.EasyMock; @@ -84,9 +85,9 @@ public class DruidCoordinatorBalancerTest segment3 = EasyMock.createMock(DataSegment.class); segment4 = EasyMock.createMock(DataSegment.class); - DateTime start1 = new DateTime("2012-01-01"); - DateTime start2 = new DateTime("2012-02-01"); - DateTime version = new DateTime("2012-03-01"); + DateTime start1 = DateTimes.of("2012-01-01"); + DateTime start2 = DateTimes.of("2012-02-01"); + DateTime version = DateTimes.of("2012-03-01"); segment1 = new DataSegment( "datasource1", new Interval(start1, start1.plusHours(1)), @@ -295,7 +296,7 @@ public class DruidCoordinatorBalancerTest ).build() ) .withBalancerStrategy(balancerStrategy) - .withBalancerReferenceTimestamp(new DateTime("2013-01-01")); + 
.withBalancerReferenceTimestamp(DateTimes.of("2013-01-01")); } private void mockDruidServer( diff --git a/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorRuleRunnerTest.java b/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorRuleRunnerTest.java index 2ad1723fdc3..eeafcc8d7b4 100644 --- a/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorRuleRunnerTest.java +++ b/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorRuleRunnerTest.java @@ -31,6 +31,8 @@ import com.metamx.emitter.EmittingLogger; import com.metamx.emitter.service.ServiceEmitter; import com.metamx.emitter.service.ServiceEventBuilder; import io.druid.client.DruidServer; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.metadata.MetadataRuleManager; import io.druid.segment.IndexIO; import io.druid.server.coordination.ServerType; @@ -75,14 +77,14 @@ public class DruidCoordinatorRuleRunnerTest EmittingLogger.registerEmitter(emitter); databaseRuleManager = EasyMock.createMock(MetadataRuleManager.class); - DateTime start = new DateTime("2012-01-01"); + DateTime start = DateTimes.of("2012-01-01"); availableSegments = Lists.newArrayList(); for (int i = 0; i < 24; i++) { availableSegments.add( new DataSegment( "test", new Interval(start, start.plusHours(1)), - new DateTime().toString(), + DateTimes.nowUtc().toString(), Maps.newHashMap(), Lists.newArrayList(), Lists.newArrayList(), @@ -123,15 +125,15 @@ public class DruidCoordinatorRuleRunnerTest EasyMock.expect(databaseRuleManager.getRulesWithDefault(EasyMock.anyObject())).andReturn( Lists.newArrayList( new IntervalLoadRule( - new Interval("2012-01-01T00:00:00.000Z/2012-01-01T06:00:00.000Z"), + Intervals.of("2012-01-01T00:00:00.000Z/2012-01-01T06:00:00.000Z"), ImmutableMap.of("hot", 1) ), new IntervalLoadRule( - new Interval("2012-01-01T00:00:00.000Z/2012-01-01T12:00:00.000Z"), + Intervals.of("2012-01-01T00:00:00.000Z/2012-01-01T12:00:00.000Z"), ImmutableMap.of("normal", 1) ), new IntervalLoadRule( - new Interval("2012-01-01T00:00:00.000Z/2012-01-02T00:00:00.000Z"), + Intervals.of("2012-01-01T00:00:00.000Z/2012-01-02T00:00:00.000Z"), ImmutableMap.of("cold", 1) ) )).atLeastOnce(); @@ -206,7 +208,7 @@ public class DruidCoordinatorRuleRunnerTest .withDatabaseRuleManager(databaseRuleManager) .withSegmentReplicantLookup(SegmentReplicantLookup.make(new DruidCluster())) .withBalancerStrategy(balancerStrategy) - .withBalancerReferenceTimestamp(new DateTime("2013-01-01")) + .withBalancerReferenceTimestamp(DateTimes.of("2013-01-01")) .withDynamicConfigs(new CoordinatorDynamicConfig.Builder().withMaxSegmentsToMove(5).build()) .build(); @@ -241,11 +243,11 @@ public class DruidCoordinatorRuleRunnerTest EasyMock.expect(databaseRuleManager.getRulesWithDefault(EasyMock.anyObject())).andReturn( Lists.newArrayList( new IntervalLoadRule( - new Interval("2012-01-01T00:00:00.000Z/2012-01-01T06:00:00.000Z"), + Intervals.of("2012-01-01T00:00:00.000Z/2012-01-01T06:00:00.000Z"), ImmutableMap.of("hot", 2) ), new IntervalLoadRule( - new Interval("2012-01-01T00:00:00.000Z/2012-01-02T00:00:00.000Z"), + Intervals.of("2012-01-01T00:00:00.000Z/2012-01-02T00:00:00.000Z"), ImmutableMap.of("cold", 1) ) ) @@ -316,7 +318,7 @@ public class DruidCoordinatorRuleRunnerTest .withDatabaseRuleManager(databaseRuleManager) .withSegmentReplicantLookup(SegmentReplicantLookup.make(new DruidCluster())) .withBalancerStrategy(balancerStrategy) - .withBalancerReferenceTimestamp(new DateTime("2013-01-01")) + 
.withBalancerReferenceTimestamp(DateTimes.of("2013-01-01")) .build(); DruidCoordinatorRuntimeParams afterParams = ruleRunner.run(params); @@ -349,11 +351,11 @@ public class DruidCoordinatorRuleRunnerTest EasyMock.expect(databaseRuleManager.getRulesWithDefault(EasyMock.anyObject())).andReturn( Lists.newArrayList( new IntervalLoadRule( - new Interval("2012-01-01T00:00:00.000Z/2012-01-01T12:00:00.000Z"), + Intervals.of("2012-01-01T00:00:00.000Z/2012-01-01T12:00:00.000Z"), ImmutableMap.of("hot", 1) ), new IntervalLoadRule( - new Interval("2012-01-01T00:00:00.000Z/2012-01-02T00:00:00.000Z"), + Intervals.of("2012-01-01T00:00:00.000Z/2012-01-02T00:00:00.000Z"), ImmutableMap.of("normal", 1) ) ) @@ -419,7 +421,7 @@ public class DruidCoordinatorRuleRunnerTest .withDatabaseRuleManager(databaseRuleManager) .withSegmentReplicantLookup(segmentReplicantLookup) .withBalancerStrategy(balancerStrategy) - .withBalancerReferenceTimestamp(new DateTime("2013-01-01")) + .withBalancerReferenceTimestamp(DateTimes.of("2013-01-01")) .build(); DruidCoordinatorRuntimeParams afterParams = ruleRunner.run(params); @@ -449,11 +451,11 @@ public class DruidCoordinatorRuleRunnerTest EasyMock.expect(databaseRuleManager.getRulesWithDefault(EasyMock.anyObject())).andReturn( Lists.newArrayList( new IntervalLoadRule( - new Interval("2012-01-01T00:00:00.000Z/2012-01-01T12:00:00.000Z"), + Intervals.of("2012-01-01T00:00:00.000Z/2012-01-01T12:00:00.000Z"), ImmutableMap.of("hot", 1) ), new IntervalLoadRule( - new Interval("2012-01-01T00:00:00.000Z/2012-01-02T00:00:00.000Z"), + Intervals.of("2012-01-01T00:00:00.000Z/2012-01-02T00:00:00.000Z"), ImmutableMap.of("normal", 1) ) ) @@ -496,7 +498,7 @@ public class DruidCoordinatorRuleRunnerTest .withDatabaseRuleManager(databaseRuleManager) .withSegmentReplicantLookup(SegmentReplicantLookup.make(new DruidCluster())) .withBalancerStrategy(balancerStrategy) - .withBalancerReferenceTimestamp(new DateTime("2013-01-01")) + .withBalancerReferenceTimestamp(DateTimes.of("2013-01-01")) .build(); ruleRunner.run(params); @@ -517,7 +519,7 @@ public class DruidCoordinatorRuleRunnerTest EasyMock.expect(databaseRuleManager.getRulesWithDefault(EasyMock.anyObject())).andReturn( Lists.newArrayList( new IntervalLoadRule( - new Interval("2012-01-02T00:00:00.000Z/2012-01-03T00:00:00.000Z"), + Intervals.of("2012-01-02T00:00:00.000Z/2012-01-03T00:00:00.000Z"), ImmutableMap.of("normal", 1) ) ) @@ -578,10 +580,10 @@ public class DruidCoordinatorRuleRunnerTest EasyMock.expect(databaseRuleManager.getRulesWithDefault(EasyMock.anyObject())).andReturn( Lists.newArrayList( new IntervalLoadRule( - new Interval("2012-01-01T00:00:00.000Z/2012-01-01T12:00:00.000Z"), + Intervals.of("2012-01-01T00:00:00.000Z/2012-01-01T12:00:00.000Z"), ImmutableMap.of("normal", 1) ), - new IntervalDropRule(new Interval("2012-01-01T00:00:00.000Z/2012-01-02T00:00:00.000Z")) + new IntervalDropRule(Intervals.of("2012-01-01T00:00:00.000Z/2012-01-02T00:00:00.000Z")) ) ).atLeastOnce(); EasyMock.replay(databaseRuleManager); @@ -628,7 +630,7 @@ public class DruidCoordinatorRuleRunnerTest .withDatabaseRuleManager(databaseRuleManager) .withSegmentReplicantLookup(segmentReplicantLookup) .withBalancerStrategy(balancerStrategy) - .withBalancerReferenceTimestamp(new DateTime("2013-01-01")) + .withBalancerReferenceTimestamp(DateTimes.of("2013-01-01")) .build(); DruidCoordinatorRuntimeParams afterParams = ruleRunner.run(params); @@ -651,10 +653,10 @@ public class DruidCoordinatorRuleRunnerTest 
EasyMock.expect(databaseRuleManager.getRulesWithDefault(EasyMock.anyObject())).andReturn( Lists.newArrayList( new IntervalLoadRule( - new Interval("2012-01-01T00:00:00.000Z/2012-01-01T12:00:00.000Z"), + Intervals.of("2012-01-01T00:00:00.000Z/2012-01-01T12:00:00.000Z"), ImmutableMap.of("normal", 1) ), - new IntervalDropRule(new Interval("2012-01-01T00:00:00.000Z/2012-01-02T00:00:00.000Z")) + new IntervalDropRule(Intervals.of("2012-01-01T00:00:00.000Z/2012-01-02T00:00:00.000Z")) ) ).atLeastOnce(); EasyMock.replay(databaseRuleManager); @@ -716,7 +718,7 @@ public class DruidCoordinatorRuleRunnerTest .withDatabaseRuleManager(databaseRuleManager) .withSegmentReplicantLookup(segmentReplicantLookup) .withBalancerStrategy(balancerStrategy) - .withBalancerReferenceTimestamp(new DateTime("2013-01-01")) + .withBalancerReferenceTimestamp(DateTimes.of("2013-01-01")) .build(); DruidCoordinatorRuntimeParams afterParams = ruleRunner.run(params); @@ -742,10 +744,10 @@ public class DruidCoordinatorRuleRunnerTest EasyMock.expect(databaseRuleManager.getRulesWithDefault(EasyMock.anyObject())).andReturn( Lists.newArrayList( new IntervalLoadRule( - new Interval("2012-01-01T00:00:00.000Z/2012-01-01T12:00:00.000Z"), + Intervals.of("2012-01-01T00:00:00.000Z/2012-01-01T12:00:00.000Z"), ImmutableMap.of("hot", 1) ), - new IntervalDropRule(new Interval("2012-01-01T00:00:00.000Z/2012-01-02T00:00:00.000Z")) + new IntervalDropRule(Intervals.of("2012-01-01T00:00:00.000Z/2012-01-02T00:00:00.000Z")) ) ).atLeastOnce(); EasyMock.replay(databaseRuleManager); @@ -811,7 +813,7 @@ public class DruidCoordinatorRuleRunnerTest .withDatabaseRuleManager(databaseRuleManager) .withSegmentReplicantLookup(segmentReplicantLookup) .withBalancerStrategy(balancerStrategy) - .withBalancerReferenceTimestamp(new DateTime("2013-01-01")) + .withBalancerReferenceTimestamp(DateTimes.of("2013-01-01")) .build(); DruidCoordinatorRuntimeParams afterParams = ruleRunner.run(params); @@ -835,10 +837,10 @@ public class DruidCoordinatorRuleRunnerTest EasyMock.expect(databaseRuleManager.getRulesWithDefault(EasyMock.anyObject())).andReturn( Lists.newArrayList( new IntervalLoadRule( - new Interval("2012-01-01T00:00:00.000Z/2012-01-01T12:00:00.000Z"), + Intervals.of("2012-01-01T00:00:00.000Z/2012-01-01T12:00:00.000Z"), ImmutableMap.of("hot", 1) ), - new IntervalDropRule(new Interval("2012-01-01T00:00:00.000Z/2012-01-02T00:00:00.000Z")) + new IntervalDropRule(Intervals.of("2012-01-01T00:00:00.000Z/2012-01-02T00:00:00.000Z")) ) ).atLeastOnce(); EasyMock.replay(databaseRuleManager); @@ -902,7 +904,7 @@ public class DruidCoordinatorRuleRunnerTest .withDatabaseRuleManager(databaseRuleManager) .withSegmentReplicantLookup(segmentReplicantLookup) .withBalancerStrategy(balancerStrategy) - .withBalancerReferenceTimestamp(new DateTime("2013-01-01")) + .withBalancerReferenceTimestamp(DateTimes.of("2013-01-01")) .build(); DruidCoordinatorRuntimeParams afterParams = ruleRunner.run(params); @@ -922,7 +924,7 @@ public class DruidCoordinatorRuleRunnerTest EasyMock.expect(databaseRuleManager.getRulesWithDefault(EasyMock.anyObject())).andReturn( Lists.newArrayList( new IntervalLoadRule( - new Interval("2012-01-01T00:00:00.000Z/2012-01-01T01:00:00.000Z"), + Intervals.of("2012-01-01T00:00:00.000Z/2012-01-01T01:00:00.000Z"), ImmutableMap.of("normal", 0) ) ) @@ -1007,7 +1009,7 @@ public class DruidCoordinatorRuleRunnerTest .withDatabaseRuleManager(databaseRuleManager) .withSegmentReplicantLookup(segmentReplicantLookup) .withBalancerStrategy(balancerStrategy) - 
.withBalancerReferenceTimestamp(new DateTime("2013-01-01")) + .withBalancerReferenceTimestamp(DateTimes.of("2013-01-01")) .build(); DruidCoordinatorRuntimeParams afterParams = ruleRunner.run(params); @@ -1037,7 +1039,7 @@ public class DruidCoordinatorRuleRunnerTest EasyMock.expect(databaseRuleManager.getRulesWithDefault(EasyMock.anyObject())).andReturn( Lists.newArrayList( new IntervalLoadRule( - new Interval("2012-01-01T00:00:00.000Z/2013-01-01T00:00:00.000Z"), + Intervals.of("2012-01-01T00:00:00.000Z/2013-01-01T00:00:00.000Z"), ImmutableMap.of("hot", 2) ) ) @@ -1091,7 +1093,7 @@ public class DruidCoordinatorRuleRunnerTest .withDatabaseRuleManager(databaseRuleManager) .withSegmentReplicantLookup(SegmentReplicantLookup.make(new DruidCluster())) .withBalancerStrategy(balancerStrategy) - .withBalancerReferenceTimestamp(new DateTime("2013-01-01")) + .withBalancerReferenceTimestamp(DateTimes.of("2013-01-01")) .build(); DruidCoordinatorRuntimeParams afterParams = ruleRunner.run(params); @@ -1103,8 +1105,8 @@ public class DruidCoordinatorRuleRunnerTest DataSegment overFlowSegment = new DataSegment( "test", - new Interval("2012-02-01/2012-02-02"), - new DateTime().toString(), + Intervals.of("2012-02-01/2012-02-02"), + DateTimes.nowUtc().toString(), Maps.newHashMap(), Lists.newArrayList(), Lists.newArrayList(), @@ -1120,7 +1122,7 @@ public class DruidCoordinatorRuleRunnerTest .withAvailableSegments(Arrays.asList(overFlowSegment)) .withDatabaseRuleManager(databaseRuleManager) .withBalancerStrategy(balancerStrategy) - .withBalancerReferenceTimestamp(new DateTime("2013-01-01")) + .withBalancerReferenceTimestamp(DateTimes.of("2013-01-01")) .withSegmentReplicantLookup(SegmentReplicantLookup.make(new DruidCluster())) .build() ); @@ -1162,7 +1164,7 @@ public class DruidCoordinatorRuleRunnerTest EasyMock.expect(databaseRuleManager.getRulesWithDefault(EasyMock.anyObject())).andReturn( Lists.newArrayList( new IntervalLoadRule( - new Interval("2012-01-01T00:00:00.000Z/2013-01-01T00:00:00.000Z"), + Intervals.of("2012-01-01T00:00:00.000Z/2013-01-01T00:00:00.000Z"), ImmutableMap.of( "hot", 1, DruidServer.DEFAULT_TIER, 1 @@ -1223,7 +1225,7 @@ public class DruidCoordinatorRuleRunnerTest .withAvailableSegments(availableSegments) .withDatabaseRuleManager(databaseRuleManager) .withBalancerStrategy(balancerStrategy) - .withBalancerReferenceTimestamp(new DateTime("2013-01-01")) + .withBalancerReferenceTimestamp(DateTimes.of("2013-01-01")) .withSegmentReplicantLookup(SegmentReplicantLookup.make(new DruidCluster())) .build(); @@ -1251,7 +1253,7 @@ public class DruidCoordinatorRuleRunnerTest EasyMock.expect(databaseRuleManager.getRulesWithDefault(EasyMock.anyObject())).andReturn( Lists.newArrayList( new IntervalLoadRule( - new Interval("2012-01-01T00:00:00.000Z/2013-01-02T00:00:00.000Z"), + Intervals.of("2012-01-01T00:00:00.000Z/2013-01-02T00:00:00.000Z"), ImmutableMap.of("normal", 1) ) ) @@ -1260,8 +1262,8 @@ public class DruidCoordinatorRuleRunnerTest DataSegment overFlowSegment = new DataSegment( "test", - new Interval("2012-02-01/2012-02-02"), - new DateTime().toString(), + Intervals.of("2012-02-01/2012-02-02"), + DateTimes.nowUtc().toString(), Maps.newHashMap(), Lists.newArrayList(), Lists.newArrayList(), @@ -1330,7 +1332,7 @@ public class DruidCoordinatorRuleRunnerTest .withDatabaseRuleManager(databaseRuleManager) .withSegmentReplicantLookup(segmentReplicantLookup) .withBalancerStrategy(balancerStrategy) - .withBalancerReferenceTimestamp(new DateTime("2013-01-01")) + 
.withBalancerReferenceTimestamp(DateTimes.of("2013-01-01")) .build(); DruidCoordinatorRuntimeParams afterParams = ruleRunner.run(params); @@ -1348,7 +1350,7 @@ public class DruidCoordinatorRuleRunnerTest Set availableSegments = new HashSet<>(); DataSegment v1 = new DataSegment( "test", - new Interval("2012-01-01/2012-01-02"), + Intervals.of("2012-01-01/2012-01-02"), "1", Maps.newHashMap(), Lists.newArrayList(), @@ -1359,7 +1361,7 @@ public class DruidCoordinatorRuleRunnerTest ); DataSegment v2 = new DataSegment( "test", - new Interval("2012-01-01/2012-01-02"), + Intervals.of("2012-01-01/2012-01-02"), "2", Maps.newHashMap(), Lists.newArrayList(), @@ -1417,7 +1419,7 @@ public class DruidCoordinatorRuleRunnerTest .withDatabaseRuleManager(databaseRuleManager) .withSegmentReplicantLookup(SegmentReplicantLookup.make(new DruidCluster())) .withBalancerStrategy(balancerStrategy) - .withBalancerReferenceTimestamp(new DateTime("2013-01-01")) + .withBalancerReferenceTimestamp(DateTimes.of("2013-01-01")) .withDynamicConfigs(new CoordinatorDynamicConfig.Builder().withMaxSegmentsToMove(5).build()) .build(); diff --git a/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorSegmentMergerTest.java b/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorSegmentMergerTest.java index fc541c58e02..eec7a87d88d 100644 --- a/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorSegmentMergerTest.java +++ b/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorSegmentMergerTest.java @@ -25,11 +25,11 @@ import com.google.common.collect.Lists; import com.metamx.emitter.service.ServiceEmitter; import io.druid.client.indexing.IndexingServiceClient; import io.druid.common.config.JacksonConfigManager; +import io.druid.java.util.common.Intervals; import io.druid.server.coordinator.helper.DruidCoordinatorSegmentMerger; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.LinearShardSpec; import org.easymock.EasyMock; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; @@ -46,10 +46,10 @@ public class DruidCoordinatorSegmentMergerTest public void testNoMerges() { final List segments = ImmutableList.of( - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-01/P1D")).version("2").size(80).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-02/P1D")).version("2").size(80).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-03/P1D")).version("2").size(80).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-04/P1D")).version("2").size(80).build() + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-01/P1D")).version("2").size(80).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-02/P1D")).version("2").size(80).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-03/P1D")).version("2").size(80).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-04/P1D")).version("2").size(80).build() ); Assert.assertEquals( @@ -61,10 +61,10 @@ public class DruidCoordinatorSegmentMergerTest public void testMergeAtStart() { final List segments = ImmutableList.of( - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-01/P1D")).version("2").size(20).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-02/P1D")).version("2").size(80).build(), - DataSegment.builder().dataSource("foo").interval(new 
Interval("2012-01-03/P1D")).version("2").size(20).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-04/P1D")).version("2").size(90).build() + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-01/P1D")).version("2").size(20).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-02/P1D")).version("2").size(80).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-03/P1D")).version("2").size(20).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-04/P1D")).version("2").size(90).build() ); Assert.assertEquals( @@ -78,10 +78,10 @@ public class DruidCoordinatorSegmentMergerTest public void testMergeAtEnd() { final List segments = ImmutableList.of( - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-01/P1D")).version("2").size(80).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-02/P1D")).version("2").size(80).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-03/P1D")).version("2").size(80).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-04/P1D")).version("2").size(20).build() + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-01/P1D")).version("2").size(80).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-02/P1D")).version("2").size(80).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-03/P1D")).version("2").size(80).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-04/P1D")).version("2").size(20).build() ); Assert.assertEquals( @@ -95,10 +95,10 @@ public class DruidCoordinatorSegmentMergerTest public void testMergeInMiddle() { final List segments = ImmutableList.of( - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-01/P1D")).version("2").size(80).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-02/P1D")).version("2").size(80).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-03/P1D")).version("2").size(10).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-04/P1D")).version("2").size(20).build() + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-01/P1D")).version("2").size(80).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-02/P1D")).version("2").size(80).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-03/P1D")).version("2").size(10).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-04/P1D")).version("2").size(20).build() ); Assert.assertEquals( @@ -112,9 +112,9 @@ public class DruidCoordinatorSegmentMergerTest public void testMergeNoncontiguous() { final List segments = ImmutableList.of( - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-01/P1D")).version("2").size(10).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-03/P1D")).version("2").size(10).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-04/P1D")).version("2").size(10).build() + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-01/P1D")).version("2").size(10).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-03/P1D")).version("2").size(10).build(), + 
DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-04/P1D")).version("2").size(10).build() ); Assert.assertEquals( @@ -128,12 +128,12 @@ public class DruidCoordinatorSegmentMergerTest public void testMergeSeriesByteLimited() { final List segments = ImmutableList.of( - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-01/P1D")).version("2").size(40).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-02/P1D")).version("2").size(40).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-03/P1D")).version("2").size(40).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-04/P1D")).version("2").size(40).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-05/P1D")).version("2").size(40).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-06/P1D")).version("2").size(40).build() + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-01/P1D")).version("2").size(40).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-02/P1D")).version("2").size(40).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-03/P1D")).version("2").size(40).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-04/P1D")).version("2").size(40).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-05/P1D")).version("2").size(40).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-06/P1D")).version("2").size(40).build() ); Assert.assertEquals( @@ -149,16 +149,16 @@ public class DruidCoordinatorSegmentMergerTest public void testMergeSeriesSegmentLimited() { final List segments = ImmutableList.of( - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-01/P1D")).version("2").size(1).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-02/P1D")).version("2").size(1).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-03/P1D")).version("2").size(1).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-04/P1D")).version("2").size(1).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-05/P1D")).version("2").size(1).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-06/P1D")).version("2").size(1).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-07/P1D")).version("2").size(1).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-08/P1D")).version("2").size(1).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-09/P1D")).version("2").size(1).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-10/P1D")).version("2").size(1).build() + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-01/P1D")).version("2").size(1).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-02/P1D")).version("2").size(1).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-03/P1D")).version("2").size(1).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-04/P1D")).version("2").size(1).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-05/P1D")).version("2").size(1).build(), + 
DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-06/P1D")).version("2").size(1).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-07/P1D")).version("2").size(1).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-08/P1D")).version("2").size(1).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-09/P1D")).version("2").size(1).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-10/P1D")).version("2").size(1).build() ); Assert.assertEquals( @@ -182,13 +182,13 @@ public class DruidCoordinatorSegmentMergerTest public void testOverlappingMergeWithBacktracking() { final List segments = ImmutableList.of( - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-01/P1D")).version("2").size(20).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-02/P1D")).version("2").size(20).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-03/P4D")).version("2").size(20).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-04/P1D")).version("3").size(20).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-05/P1D")).version("4").size(20).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-06/P1D")).version("3").size(20).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-07/P1D")).version("2").size(20).build() + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-01/P1D")).version("2").size(20).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-02/P1D")).version("2").size(20).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-03/P4D")).version("2").size(20).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-04/P1D")).version("3").size(20).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-05/P1D")).version("4").size(20).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-06/P1D")).version("3").size(20).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-07/P1D")).version("2").size(20).build() ); Assert.assertEquals( @@ -203,10 +203,10 @@ public class DruidCoordinatorSegmentMergerTest public void testOverlappingMergeWithGapsAlignedStart() { final List segments = ImmutableList.of( - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-01/P8D")).version("2").size(80).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-01/P1D")).version("3").size(8).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-04/P1D")).version("3").size(8).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-09/P1D")).version("3").size(8).build() + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-01/P8D")).version("2").size(80).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-01/P1D")).version("3").size(8).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-04/P1D")).version("3").size(8).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-09/P1D")).version("3").size(8).build() ); Assert.assertEquals( @@ -220,10 +220,10 @@ public class DruidCoordinatorSegmentMergerTest public void 
testOverlappingMergeWithGapsNonalignedStart() { final List segments = ImmutableList.of( - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-01/P8D")).version("2").size(80).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-02/P1D")).version("3").size(8).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-04/P1D")).version("3").size(8).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-09/P1D")).version("3").size(8).build() + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-01/P8D")).version("2").size(80).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-02/P1D")).version("3").size(8).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-04/P1D")).version("3").size(8).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-09/P1D")).version("3").size(8).build() ); Assert.assertEquals( @@ -237,12 +237,12 @@ public class DruidCoordinatorSegmentMergerTest public void testOverlappingMerge1() { final List segments = ImmutableList.of( - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-01/P1D")).version("2").size(80).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-02/P4D")).version("2").size(80).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-03/P1D")).version("3").size(25).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-04/P1D")).version("1").size(25).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-05/P1D")).version("3").size(25).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-06/P1D")).version("2").size(80).build() + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-01/P1D")).version("2").size(80).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-02/P4D")).version("2").size(80).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-03/P1D")).version("3").size(25).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-04/P1D")).version("1").size(25).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-05/P1D")).version("3").size(25).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-06/P1D")).version("2").size(80).build() ); Assert.assertEquals( @@ -254,12 +254,12 @@ public class DruidCoordinatorSegmentMergerTest public void testOverlappingMerge2() { final List segments = ImmutableList.of( - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-01/P1D")).version("2").size(15).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-02/P4D")).version("2").size(80).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-03/P1D")).version("3").size(25).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-04/P1D")).version("4").size(25).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-05/P1D")).version("3").size(25).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-06/P1D")).version("2").size(80).build() + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-01/P1D")).version("2").size(15).build(), + 
DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-02/P4D")).version("2").size(80).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-03/P1D")).version("3").size(25).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-04/P1D")).version("4").size(25).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-05/P1D")).version("3").size(25).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-06/P1D")).version("2").size(80).build() ); Assert.assertEquals( @@ -273,12 +273,12 @@ public class DruidCoordinatorSegmentMergerTest public void testOverlappingMerge3() { final List segments = ImmutableList.of( - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-01/P1D")).version("2").size(80).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-02/P4D")).version("2").size(80).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-03/P1D")).version("3").size(1).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-04/P1D")).version("1").size(1).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-05/P1D")).version("3").size(1).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-06/P1D")).version("2").size(80).build() + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-01/P1D")).version("2").size(80).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-02/P4D")).version("2").size(80).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-03/P1D")).version("3").size(1).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-04/P1D")).version("1").size(1).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-05/P1D")).version("3").size(1).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-06/P1D")).version("2").size(80).build() ); Assert.assertEquals( @@ -292,12 +292,12 @@ public class DruidCoordinatorSegmentMergerTest public void testOverlappingMerge4() { final List segments = ImmutableList.of( - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-01/P1D")).version("2").size(80).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-02/P4D")).version("2").size(80).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-03/P1D")).version("3").size(1).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-04/P1D")).version("4").size(1).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-05/P1D")).version("3").size(1).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-06/P1D")).version("2").size(80).build() + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-01/P1D")).version("2").size(80).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-02/P4D")).version("2").size(80).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-03/P1D")).version("3").size(1).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-04/P1D")).version("4").size(1).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-05/P1D")).version("3").size(1).build(), + 
DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-06/P1D")).version("2").size(80).build() ); Assert.assertEquals( @@ -311,12 +311,12 @@ public class DruidCoordinatorSegmentMergerTest public void testOverlappingMerge5() { final List segments = ImmutableList.of( - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-01/P1D")).version("2").size(1).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-02/P4D")).version("2").size(80).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-03/P1D")).version("3").size(25).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-04/P1D")).version("1").size(25).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-05/P1D")).version("3").size(25).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-06/P1D")).version("2").size(80).build() + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-01/P1D")).version("2").size(1).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-02/P4D")).version("2").size(80).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-03/P1D")).version("3").size(25).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-04/P1D")).version("1").size(25).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-05/P1D")).version("3").size(25).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-06/P1D")).version("2").size(80).build() ); Assert.assertEquals( @@ -328,12 +328,12 @@ public class DruidCoordinatorSegmentMergerTest public void testOverlappingMerge6() { final List segments = ImmutableList.of( - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-01/P1D")).version("2").size(1).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-02/P4D")).version("2").size(80).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-03/P1D")).version("3").size(25).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-04/P1D")).version("4").size(25).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-05/P1D")).version("3").size(25).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-06/P1D")).version("2").size(80).build() + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-01/P1D")).version("2").size(1).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-02/P4D")).version("2").size(80).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-03/P1D")).version("3").size(25).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-04/P1D")).version("4").size(25).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-05/P1D")).version("3").size(25).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-06/P1D")).version("2").size(80).build() ); Assert.assertEquals( @@ -347,12 +347,12 @@ public class DruidCoordinatorSegmentMergerTest public void testOverlappingMerge7() { final List segments = ImmutableList.of( - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-01/P1D")).version("2").size(80).build(), - DataSegment.builder().dataSource("foo").interval(new 
Interval("2012-01-02/P4D")).version("2").size(120).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-03/P1D")).version("3").size(1).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-04/P1D")).version("4").size(1).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-05/P1D")).version("3").size(1).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-06/P1D")).version("2").size(80).build() + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-01/P1D")).version("2").size(80).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-02/P4D")).version("2").size(120).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-03/P1D")).version("3").size(1).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-04/P1D")).version("4").size(1).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-05/P1D")).version("3").size(1).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-06/P1D")).version("2").size(80).build() ); Assert.assertEquals( @@ -366,12 +366,12 @@ public class DruidCoordinatorSegmentMergerTest public void testOverlappingMerge8() { final List segments = ImmutableList.of( - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-01/P1D")).version("2").size(80).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-02/P4D")).version("2").size(120).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-03/P1D")).version("3").size(1).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-04/P1D")).version("1").size(1).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-05/P1D")).version("3").size(1).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-06/P1D")).version("2").size(80).build() + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-01/P1D")).version("2").size(80).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-02/P4D")).version("2").size(120).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-03/P1D")).version("3").size(1).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-04/P1D")).version("1").size(1).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-05/P1D")).version("3").size(1).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-06/P1D")).version("2").size(80).build() ); Assert.assertEquals(ImmutableList.of(ImmutableList.of(segments.get(4), segments.get(5))), merge(segments)); @@ -383,18 +383,18 @@ public class DruidCoordinatorSegmentMergerTest final List segments = ImmutableList.of( DataSegment.builder() .dataSource("foo") - .interval(new Interval("2012-01-01/P1D")) + .interval(Intervals.of("2012-01-01/P1D")) .version("1") .shardSpec(new LinearShardSpec(1)) .build(), DataSegment.builder() .dataSource("foo") - .interval(new Interval("2012-01-02/P1D")) + .interval(Intervals.of("2012-01-02/P1D")) .version("1") .shardSpec(new LinearShardSpec(7)) .build(), DataSegment.builder().dataSource("foo") - .interval(new Interval("2012-01-03/P1D")) + .interval(Intervals.of("2012-01-03/P1D")) .version("1") .shardSpec(new LinearShardSpec(1500)) .build() @@ -412,25 +412,25 @@ public class 
DruidCoordinatorSegmentMergerTest final List segments = ImmutableList.of( DataSegment.builder() .dataSource("foo") - .interval(new Interval("2012-01-01/P1D")) + .interval(Intervals.of("2012-01-01/P1D")) .version("1") .build(), DataSegment.builder() .dataSource("foo") - .interval(new Interval("2012-01-02/P1D")) + .interval(Intervals.of("2012-01-02/P1D")) .version("1") .build(), DataSegment.builder().dataSource("foo") - .interval(new Interval("2012-01-03/P1D")) + .interval(Intervals.of("2012-01-03/P1D")) .version("1") .shardSpec(new LinearShardSpec(1500)) .build(), DataSegment.builder().dataSource("foo") - .interval(new Interval("2012-01-04/P1D")) + .interval(Intervals.of("2012-01-04/P1D")) .version("1") .build(), DataSegment.builder().dataSource("foo") - .interval(new Interval("2012-01-05/P1D")) + .interval(Intervals.of("2012-01-05/P1D")) .version("1") .build() ); diff --git a/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorTest.java b/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorTest.java index ea963962428..d28757dc476 100644 --- a/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorTest.java +++ b/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorTest.java @@ -36,6 +36,7 @@ import io.druid.concurrent.Execs; import io.druid.curator.CuratorTestBase; import io.druid.curator.discovery.NoopServiceAnnouncer; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.concurrent.ScheduledExecutorFactory; import io.druid.metadata.MetadataRuleManager; import io.druid.metadata.MetadataSegmentManager; @@ -280,7 +281,7 @@ public class DruidCoordinatorTest extends CuratorTestBase DruidDataSource[] druidDataSources = { new DruidDataSource(dataSource, Collections.emptyMap()) }; - final DataSegment dataSegment = new DataSegment(dataSource, new Interval("2010-01-01/P1D"), "v1", null, null, null, null, 0x9, 0); + final DataSegment dataSegment = new DataSegment(dataSource, Intervals.of("2010-01-01/P1D"), "v1", null, null, null, null, 0x9, 0); druidDataSources[0].addSegment("0", dataSegment); EasyMock.expect(databaseSegmentManager.isStarted()).andReturn(true).anyTimes(); @@ -384,10 +385,10 @@ public class DruidCoordinatorTest extends CuratorTestBase { DruidDataSource dataSource = new DruidDataSource("test", new HashMap()); DataSegment[] segments = new DataSegment[]{ - getSegment("test", new Interval("2016-01-10T03:00:00Z/2016-01-10T04:00:00Z")), - getSegment("test", new Interval("2016-01-11T01:00:00Z/2016-01-11T02:00:00Z")), - getSegment("test", new Interval("2016-01-09T10:00:00Z/2016-01-09T11:00:00Z")), - getSegment("test", new Interval("2016-01-09T10:00:00Z/2016-01-09T12:00:00Z")) + getSegment("test", Intervals.of("2016-01-10T03:00:00Z/2016-01-10T04:00:00Z")), + getSegment("test", Intervals.of("2016-01-11T01:00:00Z/2016-01-11T02:00:00Z")), + getSegment("test", Intervals.of("2016-01-09T10:00:00Z/2016-01-09T11:00:00Z")), + getSegment("test", Intervals.of("2016-01-09T10:00:00Z/2016-01-09T12:00:00Z")) }; for (DataSegment segment : segments) { dataSource.addSegment(segment.getIdentifier(), segment); @@ -399,10 +400,10 @@ public class DruidCoordinatorTest extends CuratorTestBase EasyMock.replay(databaseSegmentManager); Set availableSegments = coordinator.getOrderedAvailableDataSegments(); DataSegment[] expected = new DataSegment[]{ - getSegment("test", new Interval("2016-01-11T01:00:00Z/2016-01-11T02:00:00Z")), - getSegment("test", new 
Interval("2016-01-10T03:00:00Z/2016-01-10T04:00:00Z")), - getSegment("test", new Interval("2016-01-09T10:00:00Z/2016-01-09T12:00:00Z")), - getSegment("test", new Interval("2016-01-09T10:00:00Z/2016-01-09T11:00:00Z")) + getSegment("test", Intervals.of("2016-01-11T01:00:00Z/2016-01-11T02:00:00Z")), + getSegment("test", Intervals.of("2016-01-10T03:00:00Z/2016-01-10T04:00:00Z")), + getSegment("test", Intervals.of("2016-01-09T10:00:00Z/2016-01-09T12:00:00Z")), + getSegment("test", Intervals.of("2016-01-09T10:00:00Z/2016-01-09T11:00:00Z")) }; Assert.assertEquals(expected.length, availableSegments.size()); Assert.assertEquals(expected, availableSegments.toArray()); diff --git a/server/src/test/java/io/druid/server/coordinator/LoadQueuePeonTest.java b/server/src/test/java/io/druid/server/coordinator/LoadQueuePeonTest.java index 68e5d8cd895..108e1ac5c97 100644 --- a/server/src/test/java/io/druid/server/coordinator/LoadQueuePeonTest.java +++ b/server/src/test/java/io/druid/server/coordinator/LoadQueuePeonTest.java @@ -27,6 +27,7 @@ import com.google.common.collect.Lists; import io.druid.concurrent.Execs; import io.druid.curator.CuratorTestBase; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Intervals; import io.druid.server.coordination.DataSegmentChangeCallback; import io.druid.server.coordination.DataSegmentChangeHandler; import io.druid.server.coordination.DataSegmentChangeRequest; @@ -40,7 +41,6 @@ import org.apache.curator.framework.recipes.cache.PathChildrenCacheEvent; import org.apache.curator.framework.recipes.cache.PathChildrenCacheListener; import org.apache.curator.utils.ZKPaths; import org.joda.time.Duration; -import org.joda.time.Interval; import org.junit.After; import org.junit.Assert; import org.junit.Before; @@ -350,7 +350,7 @@ public class LoadQueuePeonTest extends CuratorTestBase { return DataSegment.builder() .dataSource("test_load_queue_peon") - .interval(new Interval(intervalStr)) + .interval(Intervals.of(intervalStr)) .loadSpec(ImmutableMap.of()) .version("2015-05-27T03:38:35.683Z") .dimensions(ImmutableList.of()) diff --git a/server/src/test/java/io/druid/server/coordinator/ReservoirSegmentSamplerTest.java b/server/src/test/java/io/druid/server/coordinator/ReservoirSegmentSamplerTest.java index 4b33c928a8b..700972d25bc 100644 --- a/server/src/test/java/io/druid/server/coordinator/ReservoirSegmentSamplerTest.java +++ b/server/src/test/java/io/druid/server/coordinator/ReservoirSegmentSamplerTest.java @@ -23,6 +23,7 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import io.druid.client.ImmutableDruidServer; +import io.druid.java.util.common.DateTimes; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; import org.easymock.EasyMock; @@ -73,9 +74,9 @@ public class ReservoirSegmentSamplerTest segment3 = EasyMock.createMock(DataSegment.class); segment4 = EasyMock.createMock(DataSegment.class); - DateTime start1 = new DateTime("2012-01-01"); - DateTime start2 = new DateTime("2012-02-01"); - DateTime version = new DateTime("2012-03-01"); + DateTime start1 = DateTimes.of("2012-01-01"); + DateTime start2 = DateTimes.of("2012-02-01"); + DateTime version = DateTimes.of("2012-03-01"); segment1 = new DataSegment( "datasource1", new Interval(start1, start1.plusHours(1)), diff --git a/server/src/test/java/io/druid/server/coordinator/ServerHolderTest.java b/server/src/test/java/io/druid/server/coordinator/ServerHolderTest.java index 
7f1bc806c65..30a97d0a9c6 100644 --- a/server/src/test/java/io/druid/server/coordinator/ServerHolderTest.java +++ b/server/src/test/java/io/druid/server/coordinator/ServerHolderTest.java @@ -24,11 +24,11 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import io.druid.client.ImmutableDruidDataSource; import io.druid.client.ImmutableDruidServer; +import io.druid.java.util.common.Intervals; import io.druid.server.coordination.DruidServerMetadata; import io.druid.server.coordination.ServerType; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; @@ -40,7 +40,7 @@ public class ServerHolderTest private static final List segments = ImmutableList.of( new DataSegment( "test", - new Interval("2015-04-12/2015-04-13"), + Intervals.of("2015-04-12/2015-04-13"), "1", ImmutableMap.of("containerName", "container1", "blobPath", "blobPath1"), null, @@ -51,7 +51,7 @@ public class ServerHolderTest ), new DataSegment( "test", - new Interval("2015-04-12/2015-04-13"), + Intervals.of("2015-04-12/2015-04-13"), "1", ImmutableMap.of("containerName", "container2", "blobPath", "blobPath2"), null, diff --git a/server/src/test/java/io/druid/server/coordinator/helper/DruidCoordinatorCleanupOvershadowedTest.java b/server/src/test/java/io/druid/server/coordinator/helper/DruidCoordinatorCleanupOvershadowedTest.java index 9bd095be152..cdcdf436d90 100644 --- a/server/src/test/java/io/druid/server/coordinator/helper/DruidCoordinatorCleanupOvershadowedTest.java +++ b/server/src/test/java/io/druid/server/coordinator/helper/DruidCoordinatorCleanupOvershadowedTest.java @@ -26,6 +26,7 @@ import com.google.common.collect.MinMaxPriorityQueue; import com.google.common.collect.Ordering; import io.druid.client.ImmutableDruidDataSource; import io.druid.client.ImmutableDruidServer; +import io.druid.java.util.common.DateTimes; import io.druid.server.coordinator.CoordinatorStats; import io.druid.server.coordinator.DruidCluster; import io.druid.server.coordinator.DruidCoordinator; @@ -46,7 +47,7 @@ public class DruidCoordinatorCleanupOvershadowedTest DruidCoordinatorCleanupOvershadowed druidCoordinatorCleanupOvershadowed; DruidCoordinator coordinator = EasyMock.createStrictMock(DruidCoordinator.class); private List availableSegments; - DateTime start = new DateTime("2012-01-01"); + DateTime start = DateTimes.of("2012-01-01"); DruidCluster druidCluster; private LoadQueuePeon mockPeon = EasyMock.createMock(LoadQueuePeon.class); private ImmutableDruidServer druidServer = EasyMock.createMock(ImmutableDruidServer.class); diff --git a/server/src/test/java/io/druid/server/coordinator/helper/DruidCoordinatorSegmentKillerTest.java b/server/src/test/java/io/druid/server/coordinator/helper/DruidCoordinatorSegmentKillerTest.java index 89e1d6c9887..ae7ed642d48 100644 --- a/server/src/test/java/io/druid/server/coordinator/helper/DruidCoordinatorSegmentKillerTest.java +++ b/server/src/test/java/io/druid/server/coordinator/helper/DruidCoordinatorSegmentKillerTest.java @@ -21,6 +21,7 @@ package io.druid.server.coordinator.helper; import com.google.common.collect.ImmutableList; import io.druid.client.indexing.IndexingServiceClient; +import io.druid.java.util.common.Intervals; import io.druid.metadata.MetadataSegmentManager; import io.druid.server.coordinator.TestDruidCoordinatorConfig; import org.easymock.EasyMock; @@ -41,45 +42,45 @@ public class DruidCoordinatorSegmentKillerTest 
testFindIntervalForKillTask(null, null); testFindIntervalForKillTask(ImmutableList.of(), null); - testFindIntervalForKillTask(ImmutableList.of(Interval.parse("2014/2015")), Interval.parse("2014/2015")); + testFindIntervalForKillTask(ImmutableList.of(Intervals.of("2014/2015")), Intervals.of("2014/2015")); testFindIntervalForKillTask( - ImmutableList.of(Interval.parse("2014/2015"), Interval.parse("2016/2017")), - Interval.parse("2014/2017") + ImmutableList.of(Intervals.of("2014/2015"), Intervals.of("2016/2017")), + Intervals.of("2014/2017") ); testFindIntervalForKillTask( - ImmutableList.of(Interval.parse("2014/2015"), Interval.parse("2015/2016")), - Interval.parse("2014/2016") + ImmutableList.of(Intervals.of("2014/2015"), Intervals.of("2015/2016")), + Intervals.of("2014/2016") ); testFindIntervalForKillTask( - ImmutableList.of(Interval.parse("2015/2016"), Interval.parse("2014/2015")), - Interval.parse("2014/2016") + ImmutableList.of(Intervals.of("2015/2016"), Intervals.of("2014/2015")), + Intervals.of("2014/2016") ); testFindIntervalForKillTask( - ImmutableList.of(Interval.parse("2015/2017"), Interval.parse("2014/2016")), - Interval.parse("2014/2017") + ImmutableList.of(Intervals.of("2015/2017"), Intervals.of("2014/2016")), + Intervals.of("2014/2017") ); testFindIntervalForKillTask( ImmutableList.of( - Interval.parse("2015/2019"), - Interval.parse("2014/2016"), - Interval.parse("2018/2020") + Intervals.of("2015/2019"), + Intervals.of("2014/2016"), + Intervals.of("2018/2020") ), - Interval.parse("2014/2020") + Intervals.of("2014/2020") ); testFindIntervalForKillTask( ImmutableList.of( - Interval.parse("2015/2019"), - Interval.parse("2014/2016"), - Interval.parse("2018/2020"), - Interval.parse("2021/2022") + Intervals.of("2015/2019"), + Intervals.of("2014/2016"), + Intervals.of("2018/2020"), + Intervals.of("2021/2022") ), - Interval.parse("2014/2022") + Intervals.of("2014/2022") ); } diff --git a/server/src/test/java/io/druid/server/coordinator/rules/BroadcastDistributionRuleSerdeTest.java b/server/src/test/java/io/druid/server/coordinator/rules/BroadcastDistributionRuleSerdeTest.java index ac76a368de5..93196fca4da 100644 --- a/server/src/test/java/io/druid/server/coordinator/rules/BroadcastDistributionRuleSerdeTest.java +++ b/server/src/test/java/io/druid/server/coordinator/rules/BroadcastDistributionRuleSerdeTest.java @@ -24,7 +24,7 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; import io.druid.jackson.DefaultObjectMapper; -import org.joda.time.Interval; +import io.druid.java.util.common.Intervals; import org.joda.time.Period; import org.junit.Test; import org.junit.runner.RunWith; @@ -47,9 +47,9 @@ public class BroadcastDistributionRuleSerdeTest new Object[]{new ForeverBroadcastDistributionRule(ImmutableList.of("large_source1", "large_source2"))}, new Object[]{new ForeverBroadcastDistributionRule(ImmutableList.of())}, new Object[]{new ForeverBroadcastDistributionRule(null)}, - new Object[]{new IntervalBroadcastDistributionRule(new Interval("0/1000"), ImmutableList.of("large_source"))}, - new Object[]{new IntervalBroadcastDistributionRule(new Interval("0/1000"), ImmutableList.of())}, - new Object[]{new IntervalBroadcastDistributionRule(new Interval("0/1000"), null)}, + new Object[]{new IntervalBroadcastDistributionRule(Intervals.of("0/1000"), ImmutableList.of("large_source"))}, + new Object[]{new IntervalBroadcastDistributionRule(Intervals.of("0/1000"), ImmutableList.of())}, + new Object[]{new 
IntervalBroadcastDistributionRule(Intervals.of("0/1000"), null)}, new Object[]{new PeriodBroadcastDistributionRule(new Period(1000), ImmutableList.of("large_source"))}, new Object[]{new PeriodBroadcastDistributionRule(new Period(1000), ImmutableList.of())}, new Object[]{new PeriodBroadcastDistributionRule(new Period(1000), null)} diff --git a/server/src/test/java/io/druid/server/coordinator/rules/BroadcastDistributionRuleTest.java b/server/src/test/java/io/druid/server/coordinator/rules/BroadcastDistributionRuleTest.java index 9aa2ff77931..f4719a141b9 100644 --- a/server/src/test/java/io/druid/server/coordinator/rules/BroadcastDistributionRuleTest.java +++ b/server/src/test/java/io/druid/server/coordinator/rules/BroadcastDistributionRuleTest.java @@ -26,6 +26,8 @@ import com.google.common.collect.Maps; import com.google.common.collect.MinMaxPriorityQueue; import com.google.common.collect.Ordering; import io.druid.client.DruidServer; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.server.coordination.ServerType; import io.druid.server.coordinator.CoordinatorStats; import io.druid.server.coordinator.DruidCluster; @@ -35,8 +37,6 @@ import io.druid.server.coordinator.SegmentReplicantLookup; import io.druid.server.coordinator.ServerHolder; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; -import org.joda.time.DateTime; -import org.joda.time.Interval; import org.junit.Before; import org.junit.Test; @@ -61,8 +61,8 @@ public class BroadcastDistributionRuleTest { smallSegment = new DataSegment( "small_source", - new Interval("0/1000"), - new DateTime().toString(), + Intervals.of("0/1000"), + DateTimes.nowUtc().toString(), Maps.newHashMap(), Lists.newArrayList(), Lists.newArrayList(), @@ -75,8 +75,8 @@ public class BroadcastDistributionRuleTest largeSegments.add( new DataSegment( "large_source", - new Interval((i * 1000) + "/" + ((i + 1) * 1000)), - new DateTime().toString(), + Intervals.of((i * 1000) + "/" + ((i + 1) * 1000)), + DateTimes.nowUtc().toString(), Maps.newHashMap(), Lists.newArrayList(), Lists.newArrayList(), @@ -91,8 +91,8 @@ public class BroadcastDistributionRuleTest largeSegments2.add( new DataSegment( "large_source2", - new Interval((i * 1000) + "/" + ((i + 1) * 1000)), - new DateTime().toString(), + Intervals.of((i * 1000) + "/" + ((i + 1) * 1000)), + DateTimes.nowUtc().toString(), Maps.newHashMap(), Lists.newArrayList(), Lists.newArrayList(), @@ -227,7 +227,7 @@ public class BroadcastDistributionRuleTest DruidCoordinatorRuntimeParams.newBuilder() .withDruidCluster(druidCluster) .withSegmentReplicantLookup(SegmentReplicantLookup.make(druidCluster)) - .withBalancerReferenceTimestamp(new DateTime("2013-01-01")) + .withBalancerReferenceTimestamp(DateTimes.of("2013-01-01")) .withAvailableSegments(Lists.newArrayList( smallSegment, largeSegments.get(0), @@ -267,7 +267,7 @@ public class BroadcastDistributionRuleTest DruidCoordinatorRuntimeParams.newBuilder() .withDruidCluster(druidCluster) .withSegmentReplicantLookup(SegmentReplicantLookup.make(druidCluster)) - .withBalancerReferenceTimestamp(new DateTime("2013-01-01")) + .withBalancerReferenceTimestamp(DateTimes.of("2013-01-01")) .withAvailableSegments(Lists.newArrayList( smallSegment, largeSegments.get(0), @@ -305,7 +305,7 @@ public class BroadcastDistributionRuleTest DruidCoordinatorRuntimeParams.newBuilder() .withDruidCluster(druidCluster) .withSegmentReplicantLookup(SegmentReplicantLookup.make(druidCluster)) - 
.withBalancerReferenceTimestamp(new DateTime("2013-01-01")) + .withBalancerReferenceTimestamp(DateTimes.of("2013-01-01")) .withAvailableSegments(Lists.newArrayList( smallSegment, largeSegments.get(0), diff --git a/server/src/test/java/io/druid/server/coordinator/rules/IntervalLoadRuleTest.java b/server/src/test/java/io/druid/server/coordinator/rules/IntervalLoadRuleTest.java index c3362cebcd2..a13209316a4 100644 --- a/server/src/test/java/io/druid/server/coordinator/rules/IntervalLoadRuleTest.java +++ b/server/src/test/java/io/druid/server/coordinator/rules/IntervalLoadRuleTest.java @@ -23,7 +23,7 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableMap; import io.druid.client.DruidServer; import io.druid.jackson.DefaultObjectMapper; -import org.joda.time.Interval; +import io.druid.java.util.common.Intervals; import org.junit.Assert; import org.junit.Test; @@ -35,7 +35,7 @@ import org.junit.Test; public void testSerde() throws Exception { IntervalLoadRule rule = new IntervalLoadRule( - new Interval("0/3000"), + Intervals.of("0/3000"), ImmutableMap.of(DruidServer.DEFAULT_TIER, 2) ); @@ -49,7 +49,7 @@ import org.junit.Test; public void testSerdeNullTieredReplicants() throws Exception { IntervalLoadRule rule = new IntervalLoadRule( - new Interval("0/3000"), null + Intervals.of("0/3000"), null ); ObjectMapper jsonMapper = new DefaultObjectMapper(); diff --git a/server/src/test/java/io/druid/server/coordinator/rules/LoadRuleTest.java b/server/src/test/java/io/druid/server/coordinator/rules/LoadRuleTest.java index c107400b7d0..31d8d57ad17 100644 --- a/server/src/test/java/io/druid/server/coordinator/rules/LoadRuleTest.java +++ b/server/src/test/java/io/druid/server/coordinator/rules/LoadRuleTest.java @@ -36,6 +36,8 @@ import com.metamx.emitter.core.LoggingEmitter; import com.metamx.emitter.service.ServiceEmitter; import io.druid.client.DruidServer; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.server.coordination.ServerType; import io.druid.server.coordinator.BalancerStrategy; import io.druid.server.coordinator.CoordinatorDynamicConfig; @@ -205,7 +207,7 @@ public class LoadRuleTest .withSegmentReplicantLookup(SegmentReplicantLookup.make(druidCluster)) .withReplicationManager(throttler) .withBalancerStrategy(balancerStrategy) - .withBalancerReferenceTimestamp(new DateTime("2013-01-01")) + .withBalancerReferenceTimestamp(DateTimes.of("2013-01-01")) .withAvailableSegments(Arrays.asList(segment)).build(), segment ); @@ -319,7 +321,7 @@ public class LoadRuleTest .withSegmentReplicantLookup(SegmentReplicantLookup.make(druidCluster)) .withReplicationManager(throttler) .withBalancerStrategy(balancerStrategy) - .withBalancerReferenceTimestamp(new DateTime("2013-01-01")) + .withBalancerReferenceTimestamp(DateTimes.of("2013-01-01")) .withAvailableSegments(Arrays.asList(segment)).build(), segment ); @@ -412,7 +414,7 @@ public class LoadRuleTest .withSegmentReplicantLookup(SegmentReplicantLookup.make(new DruidCluster())) .withReplicationManager(throttler) .withBalancerStrategy(balancerStrategy) - .withBalancerReferenceTimestamp(new DateTime("2013-01-01")) + .withBalancerReferenceTimestamp(DateTimes.of("2013-01-01")) .withAvailableSegments(Arrays.asList(segment)).build(), segment ); @@ -521,7 +523,7 @@ public class LoadRuleTest .withSegmentReplicantLookup(SegmentReplicantLookup.make(druidCluster)) .withReplicationManager(throttler) 
.withBalancerStrategy(balancerStrategy) - .withBalancerReferenceTimestamp(new DateTime("2013-01-01")) + .withBalancerReferenceTimestamp(DateTimes.of("2013-01-01")) .withAvailableSegments(Arrays.asList(segment)).build(), segment ); @@ -612,7 +614,7 @@ public class LoadRuleTest .withSegmentReplicantLookup(SegmentReplicantLookup.make(druidCluster)) .withReplicationManager(throttler) .withBalancerStrategy(balancerStrategy) - .withBalancerReferenceTimestamp(new DateTime("2013-01-01")) + .withBalancerReferenceTimestamp(DateTimes.of("2013-01-01")) .withAvailableSegments(Arrays.asList(dataSegment1, dataSegment2, dataSegment3)) .withDynamicConfigs(new CoordinatorDynamicConfig.Builder().withMaxSegmentsInNodeLoadingQueue(2).build()) .build(); @@ -631,8 +633,8 @@ public class LoadRuleTest { return new DataSegment( dataSource, - new Interval("0/3000"), - new DateTime().toString(), + Intervals.of("0/3000"), + DateTimes.nowUtc().toString(), Maps.newHashMap(), Lists.newArrayList(), Lists.newArrayList(), diff --git a/server/src/test/java/io/druid/server/coordinator/rules/PeriodDropRuleTest.java b/server/src/test/java/io/druid/server/coordinator/rules/PeriodDropRuleTest.java index 5d34502ed78..699136fc7ac 100644 --- a/server/src/test/java/io/druid/server/coordinator/rules/PeriodDropRuleTest.java +++ b/server/src/test/java/io/druid/server/coordinator/rules/PeriodDropRuleTest.java @@ -19,6 +19,7 @@ package io.druid.server.coordinator.rules; +import io.druid.java.util.common.DateTimes; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; import org.joda.time.DateTime; @@ -33,13 +34,13 @@ public class PeriodDropRuleTest { private final static DataSegment.Builder builder = DataSegment.builder() .dataSource("test") - .version(new DateTime("2012-12-31T01:00:00").toString()) + .version(DateTimes.of("2012-12-31T01:00:00").toString()) .shardSpec(NoneShardSpec.instance()); @Test public void testAppliesToAll() { - DateTime now = new DateTime("2012-12-31T01:00:00"); + DateTime now = DateTimes.of("2012-12-31T01:00:00"); PeriodDropRule rule = new PeriodDropRule( new Period("P5000Y") ); @@ -67,7 +68,7 @@ public class PeriodDropRuleTest @Test public void testAppliesToPeriod() { - DateTime now = new DateTime("2012-12-31T01:00:00"); + DateTime now = DateTimes.of("2012-12-31T01:00:00"); PeriodDropRule rule = new PeriodDropRule( new Period("P1M") ); diff --git a/server/src/test/java/io/druid/server/coordinator/rules/PeriodLoadRuleTest.java b/server/src/test/java/io/druid/server/coordinator/rules/PeriodLoadRuleTest.java index 688a1711dd8..c3c17615755 100644 --- a/server/src/test/java/io/druid/server/coordinator/rules/PeriodLoadRuleTest.java +++ b/server/src/test/java/io/druid/server/coordinator/rules/PeriodLoadRuleTest.java @@ -23,6 +23,8 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableMap; import io.druid.client.DruidServer; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; import org.joda.time.DateTime; @@ -37,27 +39,27 @@ public class PeriodLoadRuleTest { private final static DataSegment.Builder builder = DataSegment.builder() .dataSource("test") - .version(new DateTime().toString()) + .version(DateTimes.nowUtc().toString()) .shardSpec(NoneShardSpec.instance()); @Test public void testAppliesToAll() { - DateTime now = new DateTime("2013-01-01"); + DateTime now = 
DateTimes.of("2013-01-01"); PeriodLoadRule rule = new PeriodLoadRule( new Period("P5000Y"), ImmutableMap.of("", 0) ); - Assert.assertTrue(rule.appliesTo(builder.interval(new Interval("2012-01-01/2012-12-31")).build(), now)); - Assert.assertTrue(rule.appliesTo(builder.interval(new Interval("1000-01-01/2012-12-31")).build(), now)); - Assert.assertTrue(rule.appliesTo(builder.interval(new Interval("0500-01-01/2100-12-31")).build(), now)); + Assert.assertTrue(rule.appliesTo(builder.interval(Intervals.of("2012-01-01/2012-12-31")).build(), now)); + Assert.assertTrue(rule.appliesTo(builder.interval(Intervals.of("1000-01-01/2012-12-31")).build(), now)); + Assert.assertTrue(rule.appliesTo(builder.interval(Intervals.of("0500-01-01/2100-12-31")).build(), now)); } @Test public void testAppliesToPeriod() { - DateTime now = new DateTime("2012-12-31T01:00:00"); + DateTime now = DateTimes.of("2012-12-31T01:00:00"); PeriodLoadRule rule = new PeriodLoadRule( new Period("P1M"), ImmutableMap.of("", 0) diff --git a/server/src/test/java/io/druid/server/http/DatasourcesResourceTest.java b/server/src/test/java/io/druid/server/http/DatasourcesResourceTest.java index b9e31b48603..a254ff9d47e 100644 --- a/server/src/test/java/io/druid/server/http/DatasourcesResourceTest.java +++ b/server/src/test/java/io/druid/server/http/DatasourcesResourceTest.java @@ -25,6 +25,7 @@ import io.druid.client.CoordinatorServerView; import io.druid.client.DruidDataSource; import io.druid.client.DruidServer; import io.druid.client.indexing.IndexingServiceClient; +import io.druid.java.util.common.Intervals; import io.druid.server.coordination.ServerType; import io.druid.server.security.Access; import io.druid.server.security.Action; @@ -66,7 +67,7 @@ public class DatasourcesResourceTest dataSegmentList.add( new DataSegment( "datasource1", - new Interval("2010-01-01/P1D"), + Intervals.of("2010-01-01/P1D"), null, null, null, @@ -79,7 +80,7 @@ public class DatasourcesResourceTest dataSegmentList.add( new DataSegment( "datasource1", - new Interval("2010-01-22/P1D"), + Intervals.of("2010-01-22/P1D"), null, null, null, @@ -92,7 +93,7 @@ public class DatasourcesResourceTest dataSegmentList.add( new DataSegment( "datasource2", - new Interval("2010-01-01/P1D"), + Intervals.of("2010-01-01/P1D"), null, null, null, @@ -260,7 +261,7 @@ public class DatasourcesResourceTest DruidDataSource dataSource1 = new DruidDataSource("datasource1", new HashMap()); dataSource1.addSegment( "partition", - new DataSegment("datasegment1", new Interval("2010-01-01/P1D"), null, null, null, null, null, 0x9, 10) + new DataSegment("datasegment1", Intervals.of("2010-01-01/P1D"), null, null, null, null, null, 0x9, 10) ); EasyMock.expect(server.getDataSource("datasource1")).andReturn( dataSource1 @@ -339,8 +340,8 @@ public class DatasourcesResourceTest EasyMock.replay(inventoryView); List expectedIntervals = new ArrayList<>(); - expectedIntervals.add(new Interval("2010-01-22T00:00:00.000Z/2010-01-23T00:00:00.000Z")); - expectedIntervals.add(new Interval("2010-01-01T00:00:00.000Z/2010-01-02T00:00:00.000Z")); + expectedIntervals.add(Intervals.of("2010-01-22T00:00:00.000Z/2010-01-23T00:00:00.000Z")); + expectedIntervals.add(Intervals.of("2010-01-01T00:00:00.000Z/2010-01-02T00:00:00.000Z")); DatasourcesResource datasourcesResource = new DatasourcesResource(inventoryView, null, null, new AuthConfig()); Response response = datasourcesResource.getSegmentDataSourceIntervals("invalidDataSource", null, null); @@ -448,7 +449,7 @@ public class DatasourcesResourceTest public void 
testDeleteDataSourceSpecificInterval() throws Exception { String interval = "2010-01-01_P1D"; - Interval theInterval = new Interval(interval.replace("_", "/")); + Interval theInterval = Intervals.of(interval.replace("_", "/")); IndexingServiceClient indexingServiceClient = EasyMock.createStrictMock(IndexingServiceClient.class); indexingServiceClient.killSegments("datasource1", theInterval); diff --git a/server/src/test/java/io/druid/server/http/IntervalsResourceTest.java b/server/src/test/java/io/druid/server/http/IntervalsResourceTest.java index 3c437cc8f2a..5ec8f6857d9 100644 --- a/server/src/test/java/io/druid/server/http/IntervalsResourceTest.java +++ b/server/src/test/java/io/druid/server/http/IntervalsResourceTest.java @@ -22,6 +22,7 @@ package io.druid.server.http; import com.google.common.collect.ImmutableList; import io.druid.client.DruidServer; import io.druid.client.InventoryView; +import io.druid.java.util.common.Intervals; import io.druid.server.coordination.ServerType; import io.druid.server.security.AuthConfig; import io.druid.timeline.DataSegment; @@ -57,7 +58,7 @@ public class IntervalsResourceTest dataSegmentList.add( new DataSegment( "datasource1", - new Interval("2010-01-01T00:00:00.000Z/P1D"), + Intervals.of("2010-01-01T00:00:00.000Z/P1D"), null, null, null, @@ -70,7 +71,7 @@ public class IntervalsResourceTest dataSegmentList.add( new DataSegment( "datasource1", - new Interval("2010-01-22T00:00:00.000Z/P1D"), + Intervals.of("2010-01-22T00:00:00.000Z/P1D"), null, null, null, @@ -83,7 +84,7 @@ public class IntervalsResourceTest dataSegmentList.add( new DataSegment( "datasource2", - new Interval("2010-01-01T00:00:00.000Z/P1D"), + Intervals.of("2010-01-01T00:00:00.000Z/P1D"), null, null, null, @@ -108,8 +109,8 @@ public class IntervalsResourceTest EasyMock.replay(inventoryView); List expectedIntervals = new ArrayList<>(); - expectedIntervals.add(new Interval("2010-01-01T00:00:00.000Z/2010-01-02T00:00:00.000Z")); - expectedIntervals.add(new Interval("2010-01-22T00:00:00.000Z/2010-01-23T00:00:00.000Z")); + expectedIntervals.add(Intervals.of("2010-01-01T00:00:00.000Z/2010-01-02T00:00:00.000Z")); + expectedIntervals.add(Intervals.of("2010-01-22T00:00:00.000Z/2010-01-23T00:00:00.000Z")); IntervalsResource intervalsResource = new IntervalsResource(inventoryView, new AuthConfig()); Response response = intervalsResource.getIntervals(request); @@ -135,7 +136,7 @@ public class IntervalsResourceTest EasyMock.replay(inventoryView); List expectedIntervals = new ArrayList<>(); - expectedIntervals.add(new Interval("2010-01-01T00:00:00.000Z/2010-01-02T00:00:00.000Z")); + expectedIntervals.add(Intervals.of("2010-01-01T00:00:00.000Z/2010-01-02T00:00:00.000Z")); IntervalsResource intervalsResource = new IntervalsResource(inventoryView, new AuthConfig()); Response response = intervalsResource.getSpecificIntervals("2010-01-01T00:00:00.000Z/P1D", "simple", null, request); @@ -156,7 +157,7 @@ public class IntervalsResourceTest EasyMock.replay(inventoryView); List expectedIntervals = new ArrayList<>(); - expectedIntervals.add(new Interval("2010-01-01T00:00:00.000Z/2010-01-02T00:00:00.000Z")); + expectedIntervals.add(Intervals.of("2010-01-01T00:00:00.000Z/2010-01-02T00:00:00.000Z")); IntervalsResource intervalsResource = new IntervalsResource(inventoryView, new AuthConfig()); Response response = intervalsResource.getSpecificIntervals("2010-01-01T00:00:00.000Z/P1D", null, "full", request); diff --git a/server/src/test/java/io/druid/server/http/RulesResourceTest.java 
b/server/src/test/java/io/druid/server/http/RulesResourceTest.java index d153397cee9..b322c3bf580 100644 --- a/server/src/test/java/io/druid/server/http/RulesResourceTest.java +++ b/server/src/test/java/io/druid/server/http/RulesResourceTest.java @@ -23,9 +23,10 @@ import com.google.common.collect.ImmutableList; import io.druid.audit.AuditEntry; import io.druid.audit.AuditInfo; import io.druid.audit.AuditManager; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.metadata.MetadataRuleManager; import org.easymock.EasyMock; -import org.joda.time.DateTime; import org.joda.time.Interval; import org.junit.Assert; import org.junit.Before; @@ -59,7 +60,7 @@ public class RulesResourceTest "127.0.0.1" ), "testPayload", - new DateTime("2013-01-02T00:00:00Z") + DateTimes.of("2013-01-02T00:00:00Z") ); AuditEntry entry2 = new AuditEntry( "testKey", @@ -70,7 +71,7 @@ public class RulesResourceTest "127.0.0.1" ), "testPayload", - new DateTime("2013-01-01T00:00:00Z") + DateTimes.of("2013-01-01T00:00:00Z") ); EasyMock.expect(auditManager.fetchAuditHistory(EasyMock.eq("datasource1"), EasyMock.eq("rules"), EasyMock.eq(2))) .andReturn(ImmutableList.of(entry1, entry2)) @@ -92,7 +93,7 @@ public class RulesResourceTest public void testGetDatasourceRuleHistoryWithInterval() { String interval = "P2D/2013-01-02T00:00:00Z"; - Interval theInterval = new Interval(interval); + Interval theInterval = Intervals.of(interval); AuditEntry entry1 = new AuditEntry( "testKey", "testType", @@ -102,7 +103,7 @@ public class RulesResourceTest "127.0.0.1" ), "testPayload", - new DateTime("2013-01-02T00:00:00Z") + DateTimes.of("2013-01-02T00:00:00Z") ); AuditEntry entry2 = new AuditEntry( "testKey", @@ -113,7 +114,7 @@ public class RulesResourceTest "127.0.0.1" ), "testPayload", - new DateTime("2013-01-01T00:00:00Z") + DateTimes.of("2013-01-01T00:00:00Z") ); EasyMock.expect(auditManager.fetchAuditHistory(EasyMock.eq("datasource1"), EasyMock.eq("rules"), EasyMock.eq(theInterval))) .andReturn(ImmutableList.of(entry1, entry2)) @@ -162,7 +163,7 @@ public class RulesResourceTest "127.0.0.1" ), "testPayload", - new DateTime("2013-01-02T00:00:00Z") + DateTimes.of("2013-01-02T00:00:00Z") ); AuditEntry entry2 = new AuditEntry( "testKey", @@ -173,7 +174,7 @@ public class RulesResourceTest "127.0.0.1" ), "testPayload", - new DateTime("2013-01-01T00:00:00Z") + DateTimes.of("2013-01-01T00:00:00Z") ); EasyMock.expect(auditManager.fetchAuditHistory(EasyMock.eq("rules"), EasyMock.eq(2))) .andReturn(ImmutableList.of(entry1, entry2)) @@ -195,7 +196,7 @@ public class RulesResourceTest public void testGetAllDatasourcesRuleHistoryWithInterval() { String interval = "P2D/2013-01-02T00:00:00Z"; - Interval theInterval = new Interval(interval); + Interval theInterval = Intervals.of(interval); AuditEntry entry1 = new AuditEntry( "testKey", "testType", @@ -205,7 +206,7 @@ public class RulesResourceTest "127.0.0.1" ), "testPayload", - new DateTime("2013-01-02T00:00:00Z") + DateTimes.of("2013-01-02T00:00:00Z") ); AuditEntry entry2 = new AuditEntry( "testKey", @@ -216,7 +217,7 @@ public class RulesResourceTest "127.0.0.1" ), "testPayload", - new DateTime("2013-01-01T00:00:00Z") + DateTimes.of("2013-01-01T00:00:00Z") ); EasyMock.expect(auditManager.fetchAuditHistory(EasyMock.eq("rules"), EasyMock.eq(theInterval))) .andReturn(ImmutableList.of(entry1, entry2)) diff --git a/server/src/test/java/io/druid/server/http/ServersResourceTest.java b/server/src/test/java/io/druid/server/http/ServersResourceTest.java index 
c9842cb195c..3d3431e39f7 100644 --- a/server/src/test/java/io/druid/server/http/ServersResourceTest.java +++ b/server/src/test/java/io/druid/server/http/ServersResourceTest.java @@ -24,11 +24,11 @@ import com.google.common.collect.ImmutableList; import io.druid.client.CoordinatorServerView; import io.druid.client.DruidServer; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Intervals; import io.druid.server.coordination.DruidServerMetadata; import io.druid.server.coordination.ServerType; import io.druid.timeline.DataSegment; import org.easymock.EasyMock; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Before; import org.junit.Test; @@ -47,7 +47,7 @@ public class ServersResourceTest DruidServer dummyServer = new DruidServer("dummy", "host", null, 1234L, ServerType.HISTORICAL, "tier", 0); DataSegment segment = DataSegment.builder() .dataSource("dataSource") - .interval(new Interval("2016-03-22T14Z/2016-03-22T15Z")) + .interval(Intervals.of("2016-03-22T14Z/2016-03-22T15Z")) .version("v0") .size(1L) .build(); diff --git a/server/src/test/java/io/druid/server/listener/announcer/ListenerResourceAnnouncerTest.java b/server/src/test/java/io/druid/server/listener/announcer/ListenerResourceAnnouncerTest.java index bb843506cc7..cc2e46c05b6 100644 --- a/server/src/test/java/io/druid/server/listener/announcer/ListenerResourceAnnouncerTest.java +++ b/server/src/test/java/io/druid/server/listener/announcer/ListenerResourceAnnouncerTest.java @@ -23,6 +23,7 @@ import com.google.common.primitives.Longs; import io.druid.concurrent.Execs; import io.druid.curator.CuratorTestBase; import io.druid.curator.announcement.Announcer; +import io.druid.java.util.common.StringUtils; import io.druid.segment.CloserRule; import io.druid.server.http.HostAndPortWithScheme; import io.druid.server.initialization.ZkPathsConfig; @@ -90,7 +91,7 @@ public class ListenerResourceAnnouncerTest extends CuratorTestBase } }); Assert.assertNotNull(curator.checkExists().forPath(announcePath)); - final String nodePath = ZKPaths.makePath(announcePath, String.format("%s:%s", node.getScheme(), node.getHostText())); + final String nodePath = ZKPaths.makePath(announcePath, StringUtils.format("%s:%s", node.getScheme(), node.getHostText())); Assert.assertNotNull(curator.checkExists().forPath(nodePath)); Assert.assertEquals(Longs.BYTES, curator.getData().decompressed().forPath(nodePath).length); Assert.assertNull(curator.checkExists() @@ -124,7 +125,7 @@ public class ListenerResourceAnnouncerTest extends CuratorTestBase announcer.announce( - EasyMock.eq(ZKPaths.makePath(announcePath, String.format("%s:%s", node.getScheme(), node.getHostText()))), + EasyMock.eq(ZKPaths.makePath(announcePath, StringUtils.format("%s:%s", node.getScheme(), node.getHostText()))), EasyMock.aryEq(resourceAnnouncer.getAnnounceBytes()) ); EasyMock.expectLastCall().once(); diff --git a/server/src/test/java/io/druid/server/log/FileRequestLoggerTest.java b/server/src/test/java/io/druid/server/log/FileRequestLoggerTest.java index cfb1d319138..2b1a4ac76af 100644 --- a/server/src/test/java/io/druid/server/log/FileRequestLoggerTest.java +++ b/server/src/test/java/io/druid/server/log/FileRequestLoggerTest.java @@ -21,6 +21,7 @@ package io.druid.server.log; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.io.CharStreams; +import io.druid.java.util.common.DateTimes; import io.druid.server.RequestLogLine; import org.easymock.EasyMock; import org.joda.time.DateTime; @@ -47,7 +48,7 @@ public class 
FileRequestLoggerTest @Test public void testLog() throws IOException { ObjectMapper objectMapper = new ObjectMapper(); - DateTime dateTime = new DateTime(); + DateTime dateTime = DateTimes.nowUtc(); File logDir = temporaryFolder.newFolder(); String actualLogString = dateTime.toString() + "\t" + HOST; diff --git a/server/src/test/java/io/druid/server/log/LoggingRequestLoggerTest.java b/server/src/test/java/io/druid/server/log/LoggingRequestLoggerTest.java index aa8f8a51cac..8b1985300b2 100644 --- a/server/src/test/java/io/druid/server/log/LoggingRequestLoggerTest.java +++ b/server/src/test/java/io/druid/server/log/LoggingRequestLoggerTest.java @@ -25,6 +25,7 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Charsets; import com.google.common.collect.ImmutableMap; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Intervals; import io.druid.query.BaseQuery; import io.druid.query.DataSource; import io.druid.query.LegacyDataSource; @@ -72,7 +73,7 @@ public class LoggingRequestLoggerTest @Override public List getIntervals() { - return Collections.singletonList(Interval.parse("2016-01-01T00Z/2016-01-02T00Z")); + return Collections.singletonList(Intervals.of("2016-01-01T00Z/2016-01-02T00Z")); } @Override diff --git a/server/src/test/java/io/druid/server/metrics/HistoricalMetricsMonitorTest.java b/server/src/test/java/io/druid/server/metrics/HistoricalMetricsMonitorTest.java index 0acc6bd6f08..f97440bdad5 100644 --- a/server/src/test/java/io/druid/server/metrics/HistoricalMetricsMonitorTest.java +++ b/server/src/test/java/io/druid/server/metrics/HistoricalMetricsMonitorTest.java @@ -27,6 +27,7 @@ import com.metamx.emitter.service.ServiceEmitter; import com.metamx.emitter.service.ServiceEventBuilder; import com.metamx.emitter.service.ServiceMetricEvent; import io.druid.client.DruidServerConfig; +import io.druid.java.util.common.Intervals; import io.druid.server.SegmentManager; import io.druid.server.coordination.ZkCoordinator; import io.druid.timeline.DataSegment; @@ -34,7 +35,6 @@ import org.easymock.Capture; import org.easymock.CaptureType; import org.easymock.EasyMock; import org.easymock.EasyMockSupport; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Before; import org.junit.Test; @@ -67,7 +67,7 @@ public class HistoricalMetricsMonitorTest extends EasyMockSupport final String dataSource = "dataSource"; final DataSegment dataSegment = new DataSegment( dataSource, - Interval.parse("2014/2015"), + Intervals.of("2014/2015"), "version", ImmutableMap.of(), ImmutableList.of(), diff --git a/server/src/test/java/io/druid/server/router/QueryHostFinderTest.java b/server/src/test/java/io/druid/server/router/QueryHostFinderTest.java index 2e07c418058..1f9c3f2f0fd 100644 --- a/server/src/test/java/io/druid/server/router/QueryHostFinderTest.java +++ b/server/src/test/java/io/druid/server/router/QueryHostFinderTest.java @@ -20,10 +20,10 @@ package io.druid.server.router; import com.google.common.collect.ImmutableMap; - import io.druid.client.DruidServer; import io.druid.client.selector.Server; import io.druid.curator.discovery.ServerDiscoverySelector; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.Pair; import io.druid.query.Query; import io.druid.query.TableDataSource; @@ -126,7 +126,7 @@ public class QueryHostFinderTest Server server = queryRunner.findServer( new TimeBoundaryQuery( new TableDataSource("test"), - new MultipleIntervalSegmentSpec(Arrays.asList(new 
Interval("2011-08-31/2011-09-01"))), + new MultipleIntervalSegmentSpec(Arrays.asList(Intervals.of("2011-08-31/2011-09-01"))), null, null, null diff --git a/server/src/test/java/io/druid/server/router/TieredBrokerHostSelectorTest.java b/server/src/test/java/io/druid/server/router/TieredBrokerHostSelectorTest.java index 52d3808e9b1..5f9ccc4f91c 100644 --- a/server/src/test/java/io/druid/server/router/TieredBrokerHostSelectorTest.java +++ b/server/src/test/java/io/druid/server/router/TieredBrokerHostSelectorTest.java @@ -28,6 +28,7 @@ import io.druid.curator.discovery.ServerDiscoveryFactory; import io.druid.curator.discovery.ServerDiscoverySelector; import io.druid.guice.annotations.Global; import io.druid.guice.annotations.Json; +import io.druid.java.util.common.Intervals; import io.druid.query.Druids; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.CountAggregatorFactory; @@ -114,7 +115,7 @@ public class TieredBrokerHostSelectorTest .dataSource("test") .granularity("all") .aggregators(Arrays.asList(new CountAggregatorFactory("rows"))) - .intervals(Arrays.asList(new Interval("2011-08-31/2011-09-01"))) + .intervals(Arrays.asList(Intervals.of("2011-08-31/2011-09-01"))) .build() ).lhs; @@ -130,7 +131,7 @@ public class TieredBrokerHostSelectorTest .dataSource("test") .granularity("all") .aggregators(Arrays.asList(new CountAggregatorFactory("rows"))) - .intervals(Arrays.asList(new Interval("2013-08-31/2013-09-01"))) + .intervals(Arrays.asList(Intervals.of("2013-08-31/2013-09-01"))) .build() ).lhs; @@ -145,7 +146,7 @@ public class TieredBrokerHostSelectorTest .dataSource("test") .granularity("all") .aggregators(Arrays.asList(new CountAggregatorFactory("rows"))) - .intervals(Arrays.asList(new Interval("2010-08-31/2010-09-01"))) + .intervals(Arrays.asList(Intervals.of("2010-08-31/2010-09-01"))) .build() ).lhs; @@ -162,9 +163,9 @@ public class TieredBrokerHostSelectorTest .intervals( new MultipleIntervalSegmentSpec( Arrays.asList( - new Interval("2013-08-31/2013-09-01"), - new Interval("2012-08-31/2012-09-01"), - new Interval("2011-08-31/2011-09-01") + Intervals.of("2013-08-31/2013-09-01"), + Intervals.of("2012-08-31/2012-09-01"), + Intervals.of("2011-08-31/2011-09-01") ) ) ).build() @@ -183,9 +184,9 @@ public class TieredBrokerHostSelectorTest .intervals( new MultipleIntervalSegmentSpec( Arrays.asList( - new Interval("2011-08-31/2011-09-01"), - new Interval("2012-08-31/2012-09-01"), - new Interval("2013-08-31/2013-09-01") + Intervals.of("2011-08-31/2011-09-01"), + Intervals.of("2012-08-31/2012-09-01"), + Intervals.of("2013-08-31/2013-09-01") ) ) ).build() @@ -204,9 +205,9 @@ public class TieredBrokerHostSelectorTest .intervals( new MultipleIntervalSegmentSpec( Arrays.asList( - new Interval("2011-08-31/2011-09-01"), - new Interval("2012-08-31/2012-09-01"), - new Interval("2013-08-31/2013-09-01") + Intervals.of("2011-08-31/2011-09-01"), + Intervals.of("2012-08-31/2012-09-01"), + Intervals.of("2013-08-31/2013-09-01") ) ) ) @@ -227,9 +228,9 @@ public class TieredBrokerHostSelectorTest .intervals( new MultipleIntervalSegmentSpec( Arrays.asList( - new Interval("2011-08-31/2011-09-01"), - new Interval("2012-08-31/2012-09-01"), - new Interval("2013-08-31/2013-09-01") + Intervals.of("2011-08-31/2011-09-01"), + Intervals.of("2012-08-31/2012-09-01"), + Intervals.of("2013-08-31/2013-09-01") ) ) ) @@ -262,10 +263,10 @@ public class TieredBrokerHostSelectorTest public List getRulesWithDefault(String dataSource) { return Arrays.asList( - new IntervalLoadRule(new 
Interval("2013/2014"), ImmutableMap.of("hot", 1)), - new IntervalLoadRule(new Interval("2012/2013"), ImmutableMap.of("medium", 1)), + new IntervalLoadRule(Intervals.of("2013/2014"), ImmutableMap.of("hot", 1)), + new IntervalLoadRule(Intervals.of("2012/2013"), ImmutableMap.of("medium", 1)), new IntervalLoadRule( - new Interval("2011/2012"), + Intervals.of("2011/2012"), ImmutableMap.of(DruidServer.DEFAULT_TIER, 1) ) ); diff --git a/server/src/test/java/io/druid/server/shard/NumberedShardSpecTest.java b/server/src/test/java/io/druid/server/shard/NumberedShardSpecTest.java index 33187c17a98..d17d0e36e20 100644 --- a/server/src/test/java/io/druid/server/shard/NumberedShardSpecTest.java +++ b/server/src/test/java/io/druid/server/shard/NumberedShardSpecTest.java @@ -25,6 +25,7 @@ import com.google.common.collect.ImmutableSet; import com.google.common.collect.Lists; import com.google.common.collect.Ordering; import io.druid.TestUtil; +import io.druid.java.util.common.Intervals; import io.druid.timeline.TimelineObjectHolder; import io.druid.timeline.VersionedIntervalTimeline; import io.druid.timeline.partition.NumberedShardSpec; @@ -183,7 +184,7 @@ public class NumberedShardSpecTest ) { VersionedIntervalTimeline timeline = new VersionedIntervalTimeline<>(Ordering.natural()); - Interval interval = new Interval("2000/3000"); + Interval interval = Intervals.of("2000/3000"); String version = "v1"; for (PartitionChunk chunk : chunks) { timeline.add(interval, version, chunk); diff --git a/server/src/test/java/io/druid/timeline/partition/HashBasedNumberedShardSpecTest.java b/server/src/test/java/io/druid/timeline/partition/HashBasedNumberedShardSpecTest.java index 23b837edb34..e7688a52de9 100644 --- a/server/src/test/java/io/druid/timeline/partition/HashBasedNumberedShardSpecTest.java +++ b/server/src/test/java/io/druid/timeline/partition/HashBasedNumberedShardSpecTest.java @@ -28,6 +28,7 @@ import io.druid.TestUtil; import io.druid.data.input.InputRow; import io.druid.data.input.MapBasedInputRow; import io.druid.data.input.Row; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.ISE; import org.joda.time.DateTime; @@ -143,7 +144,7 @@ public class HashBasedNumberedShardSpecTest ImmutableList.of("visitor_id"), TestUtil.MAPPER ); - final DateTime time = new DateTime(); + final DateTime time = DateTimes.nowUtc(); final InputRow inputRow = new MapBasedInputRow( time, ImmutableList.of("visitor_id", "cnt"), @@ -220,7 +221,7 @@ public class HashBasedNumberedShardSpecTest @Override public DateTime getTimestamp() { - return new DateTime(0); + return DateTimes.EPOCH; } @Override diff --git a/services/pom.xml b/services/pom.xml index 7752423c7e2..506b4a29c27 100644 --- a/services/pom.xml +++ b/services/pom.xml @@ -71,6 +71,27 @@ + + de.thetaphi + forbiddenapis + + + validate + validate + + check + testCheck + + + + + jdk-unsafe + + + + + org.apache.maven.plugins maven-shade-plugin diff --git a/sql/src/main/java/io/druid/sql/avatica/DruidMeta.java b/sql/src/main/java/io/druid/sql/avatica/DruidMeta.java index 222c063f654..f3c819ca05b 100644 --- a/sql/src/main/java/io/druid/sql/avatica/DruidMeta.java +++ b/sql/src/main/java/io/druid/sql/avatica/DruidMeta.java @@ -29,6 +29,7 @@ import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.common.util.concurrent.ThreadFactoryBuilder; import com.google.inject.Inject; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.ISE; import io.druid.java.util.common.StringUtils; import 
io.druid.java.util.common.logger.Logger; @@ -40,7 +41,6 @@ import org.apache.calcite.avatica.NoSuchConnectionException; import org.apache.calcite.avatica.NoSuchStatementException; import org.apache.calcite.avatica.QueryState; import org.apache.calcite.avatica.remote.TypedValue; -import org.joda.time.DateTime; import org.joda.time.Interval; import javax.annotation.Nonnull; @@ -552,7 +552,7 @@ public class DruidMeta extends MetaImpl log.debug("Connection[%s] timed out.", connectionId); closeConnection(new ConnectionHandle(connectionId)); }, - new Interval(new DateTime(), config.getConnectionIdleTimeout()).toDurationMillis(), + new Interval(DateTimes.nowUtc(), config.getConnectionIdleTimeout()).toDurationMillis(), TimeUnit.MILLISECONDS ) ); diff --git a/sql/src/main/java/io/druid/sql/calcite/aggregation/ApproxCountDistinctSqlAggregator.java b/sql/src/main/java/io/druid/sql/calcite/aggregation/ApproxCountDistinctSqlAggregator.java index fff3aa13277..824aad70de9 100644 --- a/sql/src/main/java/io/druid/sql/calcite/aggregation/ApproxCountDistinctSqlAggregator.java +++ b/sql/src/main/java/io/druid/sql/calcite/aggregation/ApproxCountDistinctSqlAggregator.java @@ -22,6 +22,7 @@ package io.druid.sql.calcite.aggregation; import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.StringUtils; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.cardinality.CardinalityAggregatorFactory; import io.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory; @@ -104,7 +105,7 @@ public class ApproxCountDistinctSqlAggregator implements SqlAggregator dimensionSpec = input.getSimpleExtraction().toDimensionSpec(null, ValueType.STRING); } else { final ExpressionVirtualColumn virtualColumn = input.toVirtualColumn( - String.format("%s:v", name), + StringUtils.format("%s:v", name), inputType, plannerContext.getExprMacroTable() ); diff --git a/sql/src/main/java/io/druid/sql/calcite/aggregation/DimensionExpression.java b/sql/src/main/java/io/druid/sql/calcite/aggregation/DimensionExpression.java index 2a7ee4c49ad..d5da02d37b7 100644 --- a/sql/src/main/java/io/druid/sql/calcite/aggregation/DimensionExpression.java +++ b/sql/src/main/java/io/druid/sql/calcite/aggregation/DimensionExpression.java @@ -20,6 +20,7 @@ package io.druid.sql.calcite.aggregation; import com.google.common.collect.ImmutableList; +import io.druid.java.util.common.StringUtils; import io.druid.math.expr.ExprMacroTable; import io.druid.query.dimension.DefaultDimensionSpec; import io.druid.query.dimension.DimensionSpec; @@ -84,7 +85,7 @@ public class DimensionExpression @Nullable public String getVirtualColumnName() { - return expression.isSimpleExtraction() ? null : String.format("%s:v", outputName); + return expression.isSimpleExtraction() ? 
null : StringUtils.format("%s:v", outputName); } @Override diff --git a/sql/src/main/java/io/druid/sql/calcite/expression/CeilOperatorConversion.java b/sql/src/main/java/io/druid/sql/calcite/expression/CeilOperatorConversion.java index 10bca1f43b3..582d7397c52 100644 --- a/sql/src/main/java/io/druid/sql/calcite/expression/CeilOperatorConversion.java +++ b/sql/src/main/java/io/druid/sql/calcite/expression/CeilOperatorConversion.java @@ -20,6 +20,7 @@ package io.druid.sql.calcite.expression; import com.google.common.collect.ImmutableList; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.PeriodGranularity; import io.druid.sql.calcite.planner.PlannerContext; import io.druid.sql.calcite.table.RowSignature; @@ -60,7 +61,7 @@ public class CeilOperatorConversion implements SqlOperatorConversion // CEIL(expr) return druidExpression.map( simpleExtraction -> null, - expression -> String.format("ceil(%s)", expression) + expression -> StringUtils.format("ceil(%s)", expression) ); } else if (call.getOperands().size() == 2) { // CEIL(expr TO timeUnit) diff --git a/sql/src/main/java/io/druid/sql/calcite/expression/DruidExpression.java b/sql/src/main/java/io/druid/sql/calcite/expression/DruidExpression.java index e3cac19bafd..86475a39049 100644 --- a/sql/src/main/java/io/druid/sql/calcite/expression/DruidExpression.java +++ b/sql/src/main/java/io/druid/sql/calcite/expression/DruidExpression.java @@ -22,6 +22,7 @@ package io.druid.sql.calcite.expression; import com.google.common.base.Preconditions; import com.google.common.io.BaseEncoding; import com.google.common.primitives.Chars; +import io.druid.java.util.common.StringUtils; import io.druid.math.expr.Expr; import io.druid.math.expr.ExprMacroTable; import io.druid.math.expr.Parser; @@ -64,7 +65,7 @@ public class DruidExpression public static DruidExpression fromColumn(final String column) { - return new DruidExpression(SimpleExtraction.of(column, null), String.format("\"%s\"", escape(column))); + return new DruidExpression(SimpleExtraction.of(column, null), StringUtils.format("\"%s\"", escape(column))); } public static DruidExpression fromExpression(final String expression) diff --git a/sql/src/main/java/io/druid/sql/calcite/expression/Expressions.java b/sql/src/main/java/io/druid/sql/calcite/expression/Expressions.java index 770844184c2..6b8f9ed7561 100644 --- a/sql/src/main/java/io/druid/sql/calcite/expression/Expressions.java +++ b/sql/src/main/java/io/druid/sql/calcite/expression/Expressions.java @@ -24,7 +24,9 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.Granularity; import io.druid.java.util.common.granularity.PeriodGranularity; import io.druid.math.expr.ExprType; @@ -58,7 +60,6 @@ import org.apache.calcite.sql.SqlOperator; import org.apache.calcite.sql.fun.SqlStdOperatorTable; import org.apache.calcite.sql.type.SqlTypeFamily; import org.apache.calcite.sql.type.SqlTypeName; -import org.joda.time.DateTime; import org.joda.time.Interval; import org.joda.time.Period; @@ -265,7 +266,7 @@ public class Expressions // Ignore casts for simple extractions (use Function.identity) since it is ok in many cases. 
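// [Editorial aside, not part of the patch] The String.format -> StringUtils.format swaps in this and the
// surrounding hunks all address the same forbidden-apis "jdk-unsafe" rule: String.format() without an explicit
// Locale formats with the JVM default locale, so generated expression strings could differ between hosts.
// A minimal, JDK-only sketch of the pitfall; the class name LocaleFormatSketch is mine, and the claim that
// io.druid.java.util.common.StringUtils.format simply pins Locale.ENGLISH is my reading of the codebase, not
// something stated in this diff.

import java.util.Locale;

public class LocaleFormatSketch
{
  public static void main(String[] args)
  {
    // Simulate running on a machine whose default locale uses ',' as the decimal separator.
    Locale.setDefault(Locale.GERMANY);

    // Locale-dependent: prints "1,50" under a German default locale.
    System.out.println(String.format("%.2f", 1.5d));

    // Locale pinned explicitly: always prints "1.50", which is the behavior the StringUtils.format
    // replacements above are meant to guarantee.
    System.out.println(String.format(Locale.ENGLISH, "%.2f", 1.5d));
  }
}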
typeCastExpression = operandExpression.map( Function.identity(), - expression -> String.format("CAST(%s, '%s')", expression, toExprType.toString()) + expression -> StringUtils.format("CAST(%s, '%s')", expression, toExprType.toString()) ); } else { typeCastExpression = operandExpression; @@ -301,7 +302,7 @@ public class Expressions return null; } else if (UNARY_PREFIX_OPERATOR_MAP.containsKey(operator)) { return DruidExpression.fromExpression( - String.format( + StringUtils.format( "(%s %s)", UNARY_PREFIX_OPERATOR_MAP.get(operator), Iterables.getOnlyElement(operands).getExpression() @@ -309,7 +310,7 @@ public class Expressions ); } else if (UNARY_SUFFIX_OPERATOR_MAP.containsKey(operator)) { return DruidExpression.fromExpression( - String.format( + StringUtils.format( "(%s %s)", Iterables.getOnlyElement(operands).getExpression(), UNARY_SUFFIX_OPERATOR_MAP.get(operator) @@ -320,7 +321,7 @@ public class Expressions throw new ISE("WTF?! Got binary operator[%s] with %s args?", kind, operands.size()); } return DruidExpression.fromExpression( - String.format( + StringUtils.format( "(%s %s %s)", operands.get(0).getExpression(), BINARY_OPERATOR_MAP.get(operator), @@ -522,7 +523,7 @@ public class Expressions if (granularity != null) { // lhs is FLOOR(__time TO granularity); rhs must be a timestamp final long rhsMillis = Calcites.calciteDateTimeLiteralToJoda(rhs, plannerContext.getTimeZone()).getMillis(); - final Interval rhsInterval = granularity.bucket(new DateTime(rhsMillis)); + final Interval rhsInterval = granularity.bucket(DateTimes.utc(rhsMillis)); // Is rhs aligned on granularity boundaries? final boolean rhsAligned = rhsInterval.getStartMillis() == rhsMillis; diff --git a/sql/src/main/java/io/druid/sql/calcite/expression/FloorOperatorConversion.java b/sql/src/main/java/io/druid/sql/calcite/expression/FloorOperatorConversion.java index 822d6ad358e..eac1b857754 100644 --- a/sql/src/main/java/io/druid/sql/calcite/expression/FloorOperatorConversion.java +++ b/sql/src/main/java/io/druid/sql/calcite/expression/FloorOperatorConversion.java @@ -19,6 +19,7 @@ package io.druid.sql.calcite.expression; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.PeriodGranularity; import io.druid.sql.calcite.planner.PlannerContext; import io.druid.sql.calcite.table.RowSignature; @@ -57,7 +58,7 @@ public class FloorOperatorConversion implements SqlOperatorConversion // FLOOR(expr) return druidExpression.map( simpleExtraction -> null, // BucketExtractionFn could do this, but it's lame since it returns strings. 
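// [Editorial aside, not part of the patch] The granularity.bucket(DateTimes.utc(rhsMillis)) change above, like
// the many new DateTime(...) -> DateTimes.of(...)/DateTimes.utc(...) and new Interval(...) -> Intervals.of(...)
// replacements throughout this patch, removes a dependence on the JVM default time zone. A minimal joda-time
// sketch of the difference; the class name DefaultZoneSketch is mine, and treating DateTimes.of() as equivalent
// to constructing with ISOChronology.getInstanceUTC() is my reading of the helper, not something this diff states.

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.chrono.ISOChronology;

public class DefaultZoneSketch
{
  public static void main(String[] args)
  {
    // Simulate a host whose default zone is not UTC.
    DateTimeZone.setDefault(DateTimeZone.forID("America/Los_Angeles"));

    // Forbidden pattern: the string is interpreted in the JVM default zone, so the instant depends on the host.
    DateTime local = new DateTime("2013-01-01");

    // UTC-pinned construction, roughly what DateTimes.of("2013-01-01") does as I understand it.
    DateTime utc = new DateTime("2013-01-01", ISOChronology.getInstanceUTC());

    // Prints "false" on a non-UTC host: the two constructions denote different instants.
    System.out.println(local.getMillis() == utc.getMillis());
  }
}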
- expression -> String.format("floor(%s)", expression) + expression -> StringUtils.format("floor(%s)", expression) ); } else if (call.getOperands().size() == 2) { // FLOOR(expr TO timeUnit) diff --git a/sql/src/main/java/io/druid/sql/calcite/expression/LookupOperatorConversion.java b/sql/src/main/java/io/druid/sql/calcite/expression/LookupOperatorConversion.java index a6118b21e1b..677c83f9273 100644 --- a/sql/src/main/java/io/druid/sql/calcite/expression/LookupOperatorConversion.java +++ b/sql/src/main/java/io/druid/sql/calcite/expression/LookupOperatorConversion.java @@ -20,6 +20,7 @@ package io.druid.sql.calcite.expression; import com.google.inject.Inject; +import io.druid.java.util.common.StringUtils; import io.druid.math.expr.Expr; import io.druid.query.lookup.LookupReferencesManager; import io.druid.query.lookup.RegisteredLookupExtractionFn; @@ -65,7 +66,7 @@ public class LookupOperatorConversion implements SqlOperatorConversion plannerContext, rowSignature, rexNode, - calciteOperator().getName().toLowerCase(), + StringUtils.toLowerCase(calciteOperator().getName()), inputExpressions -> { final DruidExpression arg = inputExpressions.get(0); final Expr lookupNameExpr = inputExpressions.get(1).parse(plannerContext.getExprMacroTable()); diff --git a/sql/src/main/java/io/druid/sql/calcite/expression/RegexpExtractOperatorConversion.java b/sql/src/main/java/io/druid/sql/calcite/expression/RegexpExtractOperatorConversion.java index 8879e8313a2..731c4413ed2 100644 --- a/sql/src/main/java/io/druid/sql/calcite/expression/RegexpExtractOperatorConversion.java +++ b/sql/src/main/java/io/druid/sql/calcite/expression/RegexpExtractOperatorConversion.java @@ -19,6 +19,7 @@ package io.druid.sql.calcite.expression; +import io.druid.java.util.common.StringUtils; import io.druid.math.expr.Expr; import io.druid.query.extraction.RegexDimExtractionFn; import io.druid.sql.calcite.planner.PlannerContext; @@ -58,7 +59,7 @@ public class RegexpExtractOperatorConversion implements SqlOperatorConversion plannerContext, rowSignature, rexNode, - calciteOperator().getName().toLowerCase(), + StringUtils.toLowerCase(calciteOperator().getName()), inputExpressions -> { final DruidExpression arg = inputExpressions.get(0); final Expr patternExpr = inputExpressions.get(1).parse(plannerContext.getExprMacroTable()); diff --git a/sql/src/main/java/io/druid/sql/calcite/expression/SimpleExtraction.java b/sql/src/main/java/io/druid/sql/calcite/expression/SimpleExtraction.java index b51841b8f1d..53476dfd2a7 100644 --- a/sql/src/main/java/io/druid/sql/calcite/expression/SimpleExtraction.java +++ b/sql/src/main/java/io/druid/sql/calcite/expression/SimpleExtraction.java @@ -20,6 +20,7 @@ package io.druid.sql.calcite.expression; import com.google.common.base.Preconditions; +import io.druid.java.util.common.StringUtils; import io.druid.query.dimension.DefaultDimensionSpec; import io.druid.query.dimension.DimensionSpec; import io.druid.query.dimension.ExtractionDimensionSpec; @@ -107,7 +108,7 @@ public class SimpleExtraction public String toString() { if (extractionFn != null) { - return String.format("%s(%s)", extractionFn, column); + return StringUtils.format("%s(%s)", extractionFn, column); } else { return column; } diff --git a/sql/src/main/java/io/druid/sql/calcite/expression/SubstringOperatorConversion.java b/sql/src/main/java/io/druid/sql/calcite/expression/SubstringOperatorConversion.java index 6da872b8447..d1c169d6024 100644 --- a/sql/src/main/java/io/druid/sql/calcite/expression/SubstringOperatorConversion.java +++ 
b/sql/src/main/java/io/druid/sql/calcite/expression/SubstringOperatorConversion.java @@ -19,6 +19,7 @@ package io.druid.sql.calcite.expression; +import io.druid.java.util.common.StringUtils; import io.druid.query.extraction.SubstringDimExtractionFn; import io.druid.sql.calcite.planner.PlannerContext; import io.druid.sql.calcite.table.RowSignature; @@ -65,7 +66,7 @@ public class SubstringOperatorConversion implements SqlOperatorConversion return input.map( simpleExtraction -> simpleExtraction.cascade(new SubstringDimExtractionFn(index, length < 0 ? null : length)), - expression -> String.format( + expression -> StringUtils.format( "substring(%s, %s, %s)", expression, DruidExpression.numberLiteral(index), diff --git a/sql/src/main/java/io/druid/sql/calcite/expression/TimeArithmeticOperatorConversion.java b/sql/src/main/java/io/druid/sql/calcite/expression/TimeArithmeticOperatorConversion.java index aaa119b2e1a..8a0feef119b 100644 --- a/sql/src/main/java/io/druid/sql/calcite/expression/TimeArithmeticOperatorConversion.java +++ b/sql/src/main/java/io/druid/sql/calcite/expression/TimeArithmeticOperatorConversion.java @@ -22,6 +22,7 @@ package io.druid.sql.calcite.expression; import com.google.common.base.Preconditions; import io.druid.java.util.common.IAE; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.StringUtils; import io.druid.sql.calcite.planner.PlannerContext; import io.druid.sql.calcite.table.RowSignature; import org.apache.calcite.rex.RexCall; @@ -85,7 +86,7 @@ public abstract class TimeArithmeticOperatorConversion implements SqlOperatorCon timeExpr, shiftExpr.map( simpleExtraction -> null, - expression -> String.format("concat('P', %s, 'M')", expression) + expression -> StringUtils.format("concat('P', %s, 'M')", expression) ), DruidExpression.fromExpression(DruidExpression.numberLiteral(direction > 0 ? 1 : -1)) ) @@ -94,7 +95,7 @@ public abstract class TimeArithmeticOperatorConversion implements SqlOperatorCon // timestamp_expr { + | - } (day-time interval) // Period is a value in milliseconds. Ignore time zone. return DruidExpression.fromExpression( - String.format( + StringUtils.format( "(%s %s %s)", timeExpr.getExpression(), direction > 0 ? 
"+" : "-", diff --git a/sql/src/main/java/io/druid/sql/calcite/expression/TimeExtractOperatorConversion.java b/sql/src/main/java/io/druid/sql/calcite/expression/TimeExtractOperatorConversion.java index c1d7537b9ed..e6fbd3bcf46 100644 --- a/sql/src/main/java/io/druid/sql/calcite/expression/TimeExtractOperatorConversion.java +++ b/sql/src/main/java/io/druid/sql/calcite/expression/TimeExtractOperatorConversion.java @@ -20,6 +20,7 @@ package io.druid.sql.calcite.expression; import com.google.common.collect.ImmutableMap; +import io.druid.java.util.common.StringUtils; import io.druid.query.expression.TimestampExtractExprMacro; import io.druid.sql.calcite.planner.PlannerContext; import io.druid.sql.calcite.table.RowSignature; @@ -77,7 +78,7 @@ public class TimeExtractOperatorConversion implements SqlOperatorConversion ); } }, - expression -> String.format( + expression -> StringUtils.format( "timestamp_extract(%s,%s,%s)", expression, DruidExpression.stringLiteral(unit.name()), @@ -107,7 +108,7 @@ public class TimeExtractOperatorConversion implements SqlOperatorConversion } final TimestampExtractExprMacro.Unit unit = TimestampExtractExprMacro.Unit.valueOf( - RexLiteral.stringValue(call.getOperands().get(1)).toUpperCase() + StringUtils.toUpperCase(RexLiteral.stringValue(call.getOperands().get(1))) ); final DateTimeZone timeZone = call.getOperands().size() > 2 && !RexLiteral.isNullLiteral(call.getOperands().get(2)) diff --git a/sql/src/main/java/io/druid/sql/calcite/filtration/Filtration.java b/sql/src/main/java/io/druid/sql/calcite/filtration/Filtration.java index 42dbaef0ab1..c241fab86ae 100644 --- a/sql/src/main/java/io/druid/sql/calcite/filtration/Filtration.java +++ b/sql/src/main/java/io/druid/sql/calcite/filtration/Filtration.java @@ -21,7 +21,7 @@ package io.druid.sql.calcite.filtration; import com.google.common.base.Function; import com.google.common.collect.ImmutableList; -import io.druid.common.utils.JodaUtils; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.ISE; import io.druid.math.expr.ExprMacroTable; import io.druid.query.filter.DimFilter; @@ -35,7 +35,6 @@ import java.util.List; public class Filtration { - private static final Interval ETERNITY = new Interval(JodaUtils.MIN_INSTANT, JodaUtils.MAX_INSTANT); private static final DimFilter MATCH_NOTHING = new ExpressionDimFilter( "1 == 2", ExprMacroTable.nil() ); @@ -51,13 +50,13 @@ public class Filtration private Filtration(final DimFilter dimFilter, final List intervals) { - this.intervals = intervals != null ? intervals : ImmutableList.of(ETERNITY); + this.intervals = intervals != null ? 
intervals : Intervals.ONLY_ETERNITY; this.dimFilter = dimFilter; } public static Interval eternity() { - return ETERNITY; + return Intervals.ETERNITY; } public static DimFilter matchNothing() diff --git a/sql/src/main/java/io/druid/sql/calcite/filtration/RangeSets.java b/sql/src/main/java/io/druid/sql/calcite/filtration/RangeSets.java index 345a7e748d2..076d8d2ea17 100644 --- a/sql/src/main/java/io/druid/sql/calcite/filtration/RangeSets.java +++ b/sql/src/main/java/io/druid/sql/calcite/filtration/RangeSets.java @@ -25,6 +25,7 @@ import com.google.common.collect.Lists; import com.google.common.collect.Range; import com.google.common.collect.RangeSet; import com.google.common.collect.TreeRangeSet; +import io.druid.java.util.common.Intervals; import org.joda.time.Interval; import java.util.List; @@ -128,7 +129,7 @@ public class RangeSets end = Filtration.eternity().getEndMillis(); } - retVal.add(new Interval(start, end)); + retVal.add(Intervals.utc(start, end)); } return retVal; diff --git a/sql/src/main/java/io/druid/sql/calcite/planner/Calcites.java b/sql/src/main/java/io/druid/sql/calcite/planner/Calcites.java index afbfac840fc..779b7928176 100644 --- a/sql/src/main/java/io/druid/sql/calcite/planner/Calcites.java +++ b/sql/src/main/java/io/druid/sql/calcite/planner/Calcites.java @@ -21,6 +21,7 @@ package io.druid.sql.calcite.planner; import com.google.common.io.BaseEncoding; import com.google.common.primitives.Chars; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.IAE; import io.druid.java.util.common.ISE; import io.druid.java.util.common.StringUtils; @@ -165,7 +166,7 @@ public class Calcites public static int jodaToCalciteDate(final DateTime dateTime, final DateTimeZone timeZone) { final DateTime date = dateTime.withZone(timeZone).dayOfMonth().roundFloorCopy(); - return Days.daysBetween(new DateTime(0L, DateTimeZone.UTC), date.withZoneRetainFields(DateTimeZone.UTC)).getDays(); + return Days.daysBetween(DateTimes.EPOCH, date.withZoneRetainFields(DateTimeZone.UTC)).getDays(); } /** @@ -179,8 +180,7 @@ public class Calcites */ public static Calendar jodaToCalciteCalendarLiteral(final DateTime dateTime, final DateTimeZone timeZone) { - final Calendar calendar = Calendar.getInstance(Locale.ENGLISH); - calendar.setTimeZone(GMT_TIME_ZONE); + final Calendar calendar = Calendar.getInstance(GMT_TIME_ZONE, Locale.ENGLISH); calendar.setTimeInMillis(Calcites.jodaToCalciteTimestamp(dateTime, timeZone)); return calendar; } @@ -228,7 +228,7 @@ public class Calcites */ public static DateTime calciteDateToJoda(final int date, final DateTimeZone timeZone) { - return new DateTime(0L, DateTimeZone.UTC).plusDays(date).withZoneRetainFields(timeZone); + return DateTimes.EPOCH.plusDays(date).withZoneRetainFields(timeZone); } /** diff --git a/sql/src/main/java/io/druid/sql/calcite/planner/DruidRexExecutor.java b/sql/src/main/java/io/druid/sql/calcite/planner/DruidRexExecutor.java index 33a056dc75d..563c0ab11e2 100644 --- a/sql/src/main/java/io/druid/sql/calcite/planner/DruidRexExecutor.java +++ b/sql/src/main/java/io/druid/sql/calcite/planner/DruidRexExecutor.java @@ -20,6 +20,7 @@ package io.druid.sql.calcite.planner; import com.google.common.collect.ImmutableMap; +import io.druid.java.util.common.DateTimes; import io.druid.math.expr.Expr; import io.druid.math.expr.ExprEval; import io.druid.math.expr.ExprType; @@ -31,7 +32,6 @@ import org.apache.calcite.rex.RexBuilder; import org.apache.calcite.rex.RexExecutor; import org.apache.calcite.rex.RexNode; import 
org.apache.calcite.sql.type.SqlTypeName; -import org.joda.time.DateTime; import java.math.BigDecimal; import java.util.List; @@ -77,7 +77,7 @@ public class DruidRexExecutor implements RexExecutor literalValue = exprResult.asBoolean(); } else if (sqlTypeName == SqlTypeName.DATE || sqlTypeName == SqlTypeName.TIMESTAMP) { literalValue = Calcites.jodaToCalciteCalendarLiteral( - new DateTime(exprResult.asLong()), + DateTimes.utc(exprResult.asLong()), plannerContext.getTimeZone() ); } else if (SqlTypeName.NUMERIC_TYPES.contains(sqlTypeName)) { diff --git a/sql/src/main/java/io/druid/sql/calcite/rel/DruidSemiJoin.java b/sql/src/main/java/io/druid/sql/calcite/rel/DruidSemiJoin.java index 65fafeeee9a..5fc62de7d54 100644 --- a/sql/src/main/java/io/druid/sql/calcite/rel/DruidSemiJoin.java +++ b/sql/src/main/java/io/druid/sql/calcite/rel/DruidSemiJoin.java @@ -266,7 +266,7 @@ public class DruidSemiJoin extends DruidRel } else { bounds.add( new ExpressionDimFilter( - String.format( + StringUtils.format( "(%s == %s)", leftExpression.getExpression(), DruidExpression.stringLiteral(values.get(i)) diff --git a/sql/src/main/java/io/druid/sql/calcite/rel/QueryMaker.java b/sql/src/main/java/io/druid/sql/calcite/rel/QueryMaker.java index 2faf3db2bd8..0fc5e27c5c1 100644 --- a/sql/src/main/java/io/druid/sql/calcite/rel/QueryMaker.java +++ b/sql/src/main/java/io/druid/sql/calcite/rel/QueryMaker.java @@ -25,6 +25,7 @@ import com.google.common.base.Strings; import com.google.common.collect.Iterables; import com.google.common.primitives.Ints; import io.druid.data.input.Row; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.ISE; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; @@ -373,33 +374,9 @@ public class QueryMaker } else if (value == null) { coercedValue = null; } else if (sqlType == SqlTypeName.DATE) { - final DateTime dateTime; - - if (value instanceof Number) { - dateTime = new DateTime(((Number) value).longValue()); - } else if (value instanceof String) { - dateTime = new DateTime(Long.parseLong((String) value)); - } else if (value instanceof DateTime) { - dateTime = (DateTime) value; - } else { - throw new ISE("Cannot coerce[%s] to %s", value.getClass().getName(), sqlType); - } - - return Calcites.jodaToCalciteDate(dateTime, plannerContext.getTimeZone()); + return Calcites.jodaToCalciteDate(coerceDateTime(value, sqlType), plannerContext.getTimeZone()); } else if (sqlType == SqlTypeName.TIMESTAMP) { - final DateTime dateTime; - - if (value instanceof Number) { - dateTime = new DateTime(((Number) value).longValue()); - } else if (value instanceof String) { - dateTime = new DateTime(Long.parseLong((String) value)); - } else if (value instanceof DateTime) { - dateTime = (DateTime) value; - } else { - throw new ISE("Cannot coerce[%s] to %s", value.getClass().getName(), sqlType); - } - - return Calcites.jodaToCalciteTimestamp(dateTime, plannerContext.getTimeZone()); + return Calcites.jodaToCalciteTimestamp(coerceDateTime(value, sqlType), plannerContext.getTimeZone()); } else if (sqlType == SqlTypeName.BOOLEAN) { if (value instanceof String) { coercedValue = Evals.asBoolean(((String) value)); @@ -446,4 +423,20 @@ public class QueryMaker return coercedValue; } + + private static DateTime coerceDateTime(Object value, SqlTypeName sqlType) + { + final DateTime dateTime; + + if (value instanceof Number) { + dateTime = DateTimes.utc(((Number) value).longValue()); + } else if (value instanceof String) { + dateTime = 
DateTimes.utc(Long.parseLong((String) value)); + } else if (value instanceof DateTime) { + dateTime = (DateTime) value; + } else { + throw new ISE("Cannot coerce[%s] to %s", value.getClass().getName(), sqlType); + } + return dateTime; + } } diff --git a/sql/src/main/java/io/druid/sql/calcite/rule/GroupByRules.java b/sql/src/main/java/io/druid/sql/calcite/rule/GroupByRules.java index 4e19c617cbf..4ceaa54adb5 100644 --- a/sql/src/main/java/io/druid/sql/calcite/rule/GroupByRules.java +++ b/sql/src/main/java/io/druid/sql/calcite/rule/GroupByRules.java @@ -25,6 +25,7 @@ import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.StringUtils; import io.druid.math.expr.ExprMacroTable; import io.druid.math.expr.ExprType; import io.druid.query.aggregation.AggregatorFactory; @@ -856,8 +857,8 @@ public class GroupByRules createMaxAggregatorFactory(aggregationType, name, fieldName, expression, macroTable) ); } else if (kind == SqlKind.AVG) { - final String sumName = String.format("%s:sum", name); - final String countName = String.format("%s:count", name); + final String sumName = StringUtils.format("%s:sum", name); + final String countName = StringUtils.format("%s:count", name); final AggregatorFactory sum = createSumAggregatorFactory( aggregationType, sumName, diff --git a/sql/src/main/java/io/druid/sql/calcite/schema/DruidSchema.java b/sql/src/main/java/io/druid/sql/calcite/schema/DruidSchema.java index 9915d84fd57..a662fc04f0a 100644 --- a/sql/src/main/java/io/druid/sql/calcite/schema/DruidSchema.java +++ b/sql/src/main/java/io/druid/sql/calcite/schema/DruidSchema.java @@ -33,6 +33,7 @@ import com.metamx.emitter.EmittingLogger; import io.druid.client.ServerView; import io.druid.client.TimelineServerView; import io.druid.guice.ManageLifecycle; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.concurrent.ScheduledExecutors; import io.druid.java.util.common.guava.Sequence; @@ -58,7 +59,6 @@ import io.druid.sql.calcite.view.ViewManager; import io.druid.timeline.DataSegment; import org.apache.calcite.schema.Table; import org.apache.calcite.schema.impl.AbstractSchema; -import org.joda.time.DateTime; import java.io.IOException; import java.util.Comparator; @@ -182,7 +182,8 @@ public class DruidSchema extends AbstractSchema try { synchronized (lock) { - final long nextRefreshNoFuzz = new DateTime(lastRefresh) + final long nextRefreshNoFuzz = DateTimes + .utc(lastRefresh) .plus(config.getMetadataRefreshPeriod()) .getMillis(); diff --git a/sql/src/test/java/io/druid/sql/avatica/DruidAvaticaHandlerTest.java b/sql/src/test/java/io/druid/sql/avatica/DruidAvaticaHandlerTest.java index bd96ce11057..052869d17cb 100644 --- a/sql/src/test/java/io/druid/sql/avatica/DruidAvaticaHandlerTest.java +++ b/sql/src/test/java/io/druid/sql/avatica/DruidAvaticaHandlerTest.java @@ -30,6 +30,7 @@ import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.MoreExecutors; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.Pair; import io.druid.java.util.common.StringUtils; import io.druid.math.expr.ExprMacroTable; @@ -199,8 +200,8 @@ public class DruidAvaticaHandlerTest Assert.assertEquals( ImmutableList.of( ImmutableMap.of( - "__time", new 
Timestamp(new DateTime("2000-01-01T00:00:00.000Z").getMillis()), - "t2", new Date(new DateTime("2000-01-01").getMillis()) + "__time", new Timestamp(DateTimes.of("2000-01-01T00:00:00.000Z").getMillis()), + "t2", new Date(DateTimes.of("2000-01-01").getMillis()) ) ), getRows(resultSet) diff --git a/sql/src/test/java/io/druid/sql/avatica/DruidStatementTest.java b/sql/src/test/java/io/druid/sql/avatica/DruidStatementTest.java index af10663a8a3..f922b51b10f 100644 --- a/sql/src/test/java/io/druid/sql/avatica/DruidStatementTest.java +++ b/sql/src/test/java/io/druid/sql/avatica/DruidStatementTest.java @@ -21,6 +21,7 @@ package io.druid.sql.avatica; import com.google.common.base.Function; import com.google.common.collect.Lists; +import io.druid.java.util.common.DateTimes; import io.druid.math.expr.ExprMacroTable; import io.druid.sql.calcite.planner.Calcites; import io.druid.sql.calcite.planner.DruidOperatorTable; @@ -32,7 +33,6 @@ import io.druid.sql.calcite.util.QueryLogHook; import io.druid.sql.calcite.util.SpecificSegmentsQuerySegmentWalker; import org.apache.calcite.avatica.ColumnMetaData; import org.apache.calcite.avatica.Meta; -import org.joda.time.DateTime; import org.junit.After; import org.junit.Assert; import org.junit.Before; @@ -133,12 +133,12 @@ public class DruidStatementTest 0, true, Lists.newArrayList( - new Object[]{new DateTime("2000-01-01").getMillis(), 1L, "", "a", 1.0f}, - new Object[]{new DateTime("2000-01-02").getMillis(), 1L, "10.1", "", 2.0f}, - new Object[]{new DateTime("2000-01-03").getMillis(), 1L, "2", "", 3.0f}, - new Object[]{new DateTime("2001-01-01").getMillis(), 1L, "1", "a", 4.0f}, - new Object[]{new DateTime("2001-01-02").getMillis(), 1L, "def", "abc", 5.0f}, - new Object[]{new DateTime("2001-01-03").getMillis(), 1L, "abc", "", 6.0f} + new Object[]{DateTimes.of("2000-01-01").getMillis(), 1L, "", "a", 1.0f}, + new Object[]{DateTimes.of("2000-01-02").getMillis(), 1L, "10.1", "", 2.0f}, + new Object[]{DateTimes.of("2000-01-03").getMillis(), 1L, "2", "", 3.0f}, + new Object[]{DateTimes.of("2001-01-01").getMillis(), 1L, "1", "a", 4.0f}, + new Object[]{DateTimes.of("2001-01-02").getMillis(), 1L, "def", "abc", 5.0f}, + new Object[]{DateTimes.of("2001-01-03").getMillis(), 1L, "abc", "", 6.0f} ) ), frame @@ -159,8 +159,8 @@ public class DruidStatementTest 0, false, Lists.newArrayList( - new Object[]{new DateTime("2000-01-01").getMillis(), 1L, "", "a", 1.0f}, - new Object[]{new DateTime("2000-01-02").getMillis(), 1L, "10.1", "", 2.0f} + new Object[]{DateTimes.of("2000-01-01").getMillis(), 1L, "", "a", 1.0f}, + new Object[]{DateTimes.of("2000-01-02").getMillis(), 1L, "10.1", "", 2.0f} ) ), frame @@ -174,10 +174,10 @@ public class DruidStatementTest 2, true, Lists.newArrayList( - new Object[]{new DateTime("2000-01-03").getMillis(), 1L, "2", "", 3.0f}, - new Object[]{new DateTime("2001-01-01").getMillis(), 1L, "1", "a", 4.0f}, - new Object[]{new DateTime("2001-01-02").getMillis(), 1L, "def", "abc", 5.0f}, - new Object[]{new DateTime("2001-01-03").getMillis(), 1L, "abc", "", 6.0f} + new Object[]{DateTimes.of("2000-01-03").getMillis(), 1L, "2", "", 3.0f}, + new Object[]{DateTimes.of("2001-01-01").getMillis(), 1L, "1", "a", 4.0f}, + new Object[]{DateTimes.of("2001-01-02").getMillis(), 1L, "def", "abc", 5.0f}, + new Object[]{DateTimes.of("2001-01-03").getMillis(), 1L, "abc", "", 6.0f} ) ), frame diff --git a/sql/src/test/java/io/druid/sql/calcite/CalciteQueryTest.java b/sql/src/test/java/io/druid/sql/calcite/CalciteQueryTest.java index 156ed170bb3..18dcf54ef5d 100644 --- 
a/sql/src/test/java/io/druid/sql/calcite/CalciteQueryTest.java
+++ b/sql/src/test/java/io/druid/sql/calcite/CalciteQueryTest.java
@@ -23,6 +23,8 @@ import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Lists;
 import io.druid.hll.HLLCV1;
+import io.druid.java.util.common.DateTimes;
+import io.druid.java.util.common.Intervals;
 import io.druid.java.util.common.StringUtils;
 import io.druid.java.util.common.granularity.Granularities;
 import io.druid.java.util.common.granularity.PeriodGranularity;
@@ -100,6 +102,7 @@ import org.joda.time.DateTime;
 import org.joda.time.DateTimeZone;
 import org.joda.time.Interval;
 import org.joda.time.Period;
+import org.joda.time.chrono.ISOChronology;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
@@ -2363,7 +2366,7 @@ public class CalciteQueryTest
         ImmutableList.of(
             Druids.newTimeseriesQueryBuilder()
                   .dataSource(CalciteTests.DATASOURCE1)
-                  .intervals(QSS(new Interval("2000-01-01/2001-01-01")))
+                  .intervals(QSS(Intervals.of("2000-01-01/2001-01-01")))
                   .granularity(Granularities.ALL)
                   .aggregators(AGGS(new CountAggregatorFactory("a0")))
                   .context(TIMESERIES_CONTEXT_DEFAULT)
@@ -2386,7 +2389,7 @@ public class CalciteQueryTest
         ImmutableList.of(
             Druids.newTimeseriesQueryBuilder()
                   .dataSource(CalciteTests.DATASOURCE1)
-                  .intervals(QSS(new Interval("2000-01-01/2001-01-01")))
+                  .intervals(QSS(Intervals.of("2000-01-01/2001-01-01")))
                   .granularity(Granularities.ALL)
                   .aggregators(AGGS(new CountAggregatorFactory("a0")))
                   .context(TIMESERIES_CONTEXT_DEFAULT)
@@ -2406,7 +2409,7 @@ public class CalciteQueryTest
         ImmutableList.of(
             Druids.newTimeseriesQueryBuilder()
                   .dataSource(CalciteTests.DATASOURCE1)
-                  .intervals(QSS(new Interval("2000-01-01/2000-01-01T00:00:00.001")))
+                  .intervals(QSS(Intervals.of("2000-01-01/2000-01-01T00:00:00.001")))
                   .granularity(Granularities.ALL)
                   .aggregators(AGGS(new CountAggregatorFactory("a0")))
                   .context(TIMESERIES_CONTEXT_DEFAULT)
@@ -2429,8 +2432,8 @@ public class CalciteQueryTest
                   .dataSource(CalciteTests.DATASOURCE1)
                   .intervals(
                       QSS(
-                          new Interval("2000-01-01/2000-01-01T00:00:00.001"),
-                          new Interval("2000-01-02/2000-01-02T00:00:00.001")
+                          Intervals.of("2000-01-01/2000-01-01T00:00:00.001"),
+                          Intervals.of("2000-01-02/2000-01-02T00:00:00.001")
                       )
                   )
                   .granularity(Granularities.ALL)
@@ -2460,7 +2463,7 @@ public class CalciteQueryTest
         ImmutableList.of(
             Druids.newTimeseriesQueryBuilder()
                   .dataSource(CalciteTests.DATASOURCE1)
-                  .intervals(QSS(new Interval("2000/2001"), new Interval("2002-05-01/2003-05-01")))
+                  .intervals(QSS(Intervals.of("2000/2001"), Intervals.of("2002-05-01/2003-05-01")))
                   .granularity(Granularities.ALL)
                   .filters(
                       AND(
@@ -2538,9 +2541,9 @@ public class CalciteQueryTest
                   .dataSource(CalciteTests.DATASOURCE1)
                   .intervals(
                       QSS(
-                          new Interval(Filtration.eternity().getStart(), new DateTime("2000")),
-                          new Interval("2001/2003"),
-                          new Interval(new DateTime("2004"), Filtration.eternity().getEnd())
+                          new Interval(DateTimes.MIN, DateTimes.of("2000")),
+                          Intervals.of("2001/2003"),
+                          new Interval(DateTimes.of("2004"), DateTimes.MAX)
                       )
                   )
                   .filters(NOT(SELECTOR("dim1", "xxx", null)))
@@ -2565,7 +2568,7 @@ public class CalciteQueryTest
         ImmutableList.of(
             Druids.newTimeseriesQueryBuilder()
                   .dataSource(CalciteTests.DATASOURCE1)
-                  .intervals(QSS(new Interval("2000-01-01/2001-01-01")))
+                  .intervals(QSS(Intervals.of("2000-01-01/2001-01-01")))
                   .filters(NOT(SELECTOR("dim2", "a", null)))
                   .granularity(Granularities.ALL)
                   .aggregators(AGGS(new CountAggregatorFactory("a0")))
@@ -2629,8 +2632,8 @@ public class CalciteQueryTest
                   .filters(
                       BOUND(
                           "cnt",
-                          String.valueOf(new DateTime("1970-01-01").getMillis()),
-                          String.valueOf(new DateTime("1970-01-02").getMillis()),
+                          String.valueOf(DateTimes.of("1970-01-01").getMillis()),
+                          String.valueOf(DateTimes.of("1970-01-02").getMillis()),
                           false,
                           true,
                           null,
@@ -2662,8 +2665,8 @@ public class CalciteQueryTest
                   .filters(
                       BOUND(
                           "cnt",
-                          String.valueOf(new DateTime("1970-01-01").getMillis()),
-                          String.valueOf(new DateTime("1970-01-02").getMillis()),
+                          String.valueOf(DateTimes.of("1970-01-01").getMillis()),
+                          String.valueOf(DateTimes.of("1970-01-02").getMillis()),
                           false,
                           true,
                           null,
@@ -2760,8 +2763,8 @@ public class CalciteQueryTest
                   .setDimFilter(
                       BOUND(
                           "cnt",
-                          String.valueOf(new DateTime("1970-01-01").getMillis()),
-                          String.valueOf(new DateTime("1970-01-02").getMillis()),
+                          String.valueOf(DateTimes.of("1970-01-01").getMillis()),
+                          String.valueOf(DateTimes.of("1970-01-02").getMillis()),
                           false,
                           true,
                           null,
@@ -3911,7 +3914,7 @@ public class CalciteQueryTest
         ImmutableList.of(
             Druids.newTimeseriesQueryBuilder()
                   .dataSource(CalciteTests.DATASOURCE1)
-                  .intervals(QSS(new Interval("2000/P2M")))
+                  .intervals(QSS(Intervals.of("2000/P2M")))
                   .granularity(Granularities.ALL)
                   .aggregators(AGGS(new CountAggregatorFactory("a0")))
                   .context(TIMESERIES_CONTEXT_DEFAULT)
@@ -3934,7 +3937,7 @@ public class CalciteQueryTest
         ImmutableList.of(
             Druids.newTimeseriesQueryBuilder()
                   .dataSource(CalciteTests.DATASOURCE1)
-                  .intervals(QSS(new Interval("2000-01-01T01:02/2002")))
+                  .intervals(QSS(Intervals.of("2000-01-01T01:02/2002")))
                   .granularity(Granularities.ALL)
                   .aggregators(AGGS(new CountAggregatorFactory("a0")))
                   .context(TIMESERIES_CONTEXT_DEFAULT)
@@ -3971,7 +3974,7 @@ public class CalciteQueryTest
         ImmutableList.of(
             Druids.newTimeseriesQueryBuilder()
                   .dataSource(CalciteTests.DATASOURCE1)
-                  .intervals(QSS(new Interval("2000-01-02T00Z/2002-01-01T08Z")))
+                  .intervals(QSS(Intervals.of("2000-01-02T00Z/2002-01-01T08Z")))
                   .granularity(Granularities.ALL)
                   .aggregators(AGGS(new CountAggregatorFactory("a0")))
                   .context(TIMESERIES_CONTEXT_LOS_ANGELES)
@@ -3991,7 +3994,7 @@ public class CalciteQueryTest
         ImmutableList.of(
             Druids.newTimeseriesQueryBuilder()
                   .dataSource(CalciteTests.DATASOURCE1)
-                  .intervals(QSS(new Interval("2000-01-02/2002")))
+                  .intervals(QSS(Intervals.of("2000-01-02/2002")))
                   .granularity(Granularities.ALL)
                   .aggregators(AGGS(new CountAggregatorFactory("a0")))
                   .context(TIMESERIES_CONTEXT_DEFAULT)
@@ -4016,7 +4019,7 @@ public class CalciteQueryTest
         ImmutableList.of(
             Druids.newTimeseriesQueryBuilder()
                   .dataSource(CalciteTests.DATASOURCE1)
-                  .intervals(QSS(new Interval("2000-01-02T00Z/2002-01-01T08Z")))
+                  .intervals(QSS(Intervals.of("2000-01-02T00Z/2002-01-01T08Z")))
                   .granularity(Granularities.ALL)
                   .aggregators(AGGS(new CountAggregatorFactory("a0")))
                   .context(TIMESERIES_CONTEXT_LOS_ANGELES)
@@ -4039,8 +4042,8 @@ public class CalciteQueryTest
             Druids.newTimeseriesQueryBuilder()
                   .dataSource(CalciteTests.DATASOURCE1)
                   .intervals(QSS(
-                      new Interval(Filtration.eternity().getStart(), new DateTime("2001-01-01")),
-                      new Interval(new DateTime("2001-02-01"), Filtration.eternity().getEnd())
+                      new Interval(DateTimes.MIN, DateTimes.of("2001-01-01")),
+                      new Interval(DateTimes.of("2001-02-01"), DateTimes.MAX)
                   ))
                   .granularity(Granularities.ALL)
                   .aggregators(AGGS(new CountAggregatorFactory("a0")))
@@ -4063,7 +4066,7 @@ public class CalciteQueryTest
         ImmutableList.of(
             Druids.newTimeseriesQueryBuilder()
                   .dataSource(CalciteTests.DATASOURCE1)
-                  .intervals(QSS(new Interval(Filtration.eternity().getStart(), new DateTime("2000-02-01"))))
+                  .intervals(QSS(new Interval(DateTimes.MIN, DateTimes.of("2000-02-01"))))
                   .granularity(Granularities.ALL)
                   .aggregators(AGGS(new CountAggregatorFactory("a0")))
                   .context(TIMESERIES_CONTEXT_DEFAULT)
@@ -4085,7 +4088,7 @@ public class CalciteQueryTest
         ImmutableList.of(
             Druids.newTimeseriesQueryBuilder()
                   .dataSource(CalciteTests.DATASOURCE1)
-                  .intervals(QSS(new Interval(Filtration.eternity().getStart(), new DateTime("2000-03-01"))))
+                  .intervals(QSS(new Interval(DateTimes.MIN, DateTimes.of("2000-03-01"))))
                   .granularity(Granularities.ALL)
                   .aggregators(AGGS(new CountAggregatorFactory("a0")))
                   .context(TIMESERIES_CONTEXT_DEFAULT)
@@ -4108,7 +4111,7 @@ public class CalciteQueryTest
         ImmutableList.of(
             Druids.newTimeseriesQueryBuilder()
                   .dataSource(CalciteTests.DATASOURCE1)
-                  .intervals(QSS(new Interval("2000/P1M")))
+                  .intervals(QSS(Intervals.of("2000/P1M")))
                   .granularity(Granularities.ALL)
                   .aggregators(AGGS(new CountAggregatorFactory("a0")))
                   .context(TIMESERIES_CONTEXT_DEFAULT)
@@ -4131,7 +4134,7 @@ public class CalciteQueryTest
         ImmutableList.of(
             Druids.newTimeseriesQueryBuilder()
                   .dataSource(CalciteTests.DATASOURCE1)
-                  .intervals(QSS(new Interval("2000-02-01/P2M"), new Interval("2000-05-01/P1M")))
+                  .intervals(QSS(Intervals.of("2000-02-01/P2M"), Intervals.of("2000-05-01/P1M")))
                   .granularity(Granularities.ALL)
                   .aggregators(AGGS(new CountAggregatorFactory("a0")))
                   .context(TIMESERIES_CONTEXT_DEFAULT)
@@ -4460,7 +4463,7 @@ public class CalciteQueryTest
         ImmutableList.of(
             Druids.newTimeseriesQueryBuilder()
                   .dataSource(CalciteTests.DATASOURCE1)
-                  .intervals(QSS(new Interval("2000-01-01/2001-02-01")))
+                  .intervals(QSS(Intervals.of("2000-01-01/2001-02-01")))
                   .granularity(Granularities.ALL)
                   .aggregators(AGGS(
                       new FilteredAggregatorFactory(
@@ -4618,7 +4621,7 @@ public class CalciteQueryTest
                   .granularity(
                       new PeriodGranularity(
                           Period.months(1),
-                          new DateTime("1970-01-01T01:02:03"),
+                          DateTimes.of("1970-01-01T01:02:03"),
                           DateTimeZone.UTC
                       )
                   )
@@ -4710,7 +4713,7 @@ public class CalciteQueryTest
         ImmutableList.of(
             Druids.newTimeseriesQueryBuilder()
                   .dataSource(CalciteTests.DATASOURCE1)
-                  .intervals(QSS(new Interval("2000/2000-01-02")))
+                  .intervals(QSS(Intervals.of("2000/2000-01-02")))
                   .granularity(new PeriodGranularity(Period.hours(1), null, DateTimeZone.UTC))
                   .aggregators(AGGS(new LongSumAggregatorFactory("a0", "cnt")))
                   .context(QUERY_CONTEXT_DONT_SKIP_EMPTY_BUCKETS)
@@ -5507,7 +5510,7 @@ public class CalciteQueryTest
   // Generate timestamps for expected results
   private static long T(final String timeString)
   {
-    return Calcites.jodaToCalciteTimestamp(new DateTime(timeString), DateTimeZone.UTC);
+    return Calcites.jodaToCalciteTimestamp(DateTimes.of(timeString), DateTimeZone.UTC);
   }
   // Generate timestamps for expected results
@@ -5520,7 +5523,7 @@ public class CalciteQueryTest
   // Generate day numbers for expected results
   private static int D(final String dayString)
   {
-    return (int) (new Interval(T("1970"), T(dayString)).toDurationMillis() / (86400L * 1000L));
+    return (int) (Intervals.utc(T("1970"), T(dayString)).toDurationMillis() / (86400L * 1000L));
   }
   private static QuerySegmentSpec QSS(final Interval... intervals)
@@ -5583,7 +5586,7 @@ public class CalciteQueryTest
   private static BoundDimFilter TIME_BOUND(final Object intervalObj)
   {
-    final Interval interval = new Interval(intervalObj);
+    final Interval interval = new Interval(intervalObj, ISOChronology.getInstanceUTC());
     return new BoundDimFilter(
         Column.TIME_COLUMN_NAME,
         String.valueOf(interval.getStartMillis()),
diff --git a/sql/src/test/java/io/druid/sql/calcite/expression/ExpressionsTest.java b/sql/src/test/java/io/druid/sql/calcite/expression/ExpressionsTest.java
index b3498bd5862..a0ec577305a 100644
--- a/sql/src/test/java/io/druid/sql/calcite/expression/ExpressionsTest.java
+++ b/sql/src/test/java/io/druid/sql/calcite/expression/ExpressionsTest.java
@@ -21,6 +21,7 @@ package io.druid.sql.calcite.expression;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
+import io.druid.java.util.common.DateTimes;
 import io.druid.java.util.common.granularity.Granularities;
 import io.druid.java.util.common.granularity.PeriodGranularity;
 import io.druid.math.expr.ExprEval;
@@ -75,7 +76,7 @@ public class ExpressionsTest
       .add("dstr", ValueType.STRING)
       .build();
   private final Map bindings = ImmutableMap.builder()
-      .put("t", new DateTime("2000-02-03T04:05:06").getMillis())
+      .put("t", DateTimes.of("2000-02-03T04:05:06").getMillis())
       .put("a", 10)
       .put("b", 20)
       .put("x", 2.5)
@@ -155,11 +156,11 @@ public class ExpressionsTest
     testExpression(
         rexBuilder.makeCall(
             new TimeFloorOperatorConversion().calciteOperator(),
-            timestampLiteral(new DateTime("2000-02-03T04:05:06Z")),
+            timestampLiteral(DateTimes.of("2000-02-03T04:05:06Z")),
             rexBuilder.makeLiteral("PT1H")
         ),
         DruidExpression.fromExpression("timestamp_floor(949550706000,'PT1H','','UTC')"),
-        new DateTime("2000-02-03T04:00:00").getMillis()
+        DateTimes.of("2000-02-03T04:00:00").getMillis()
     );
     testExpression(
@@ -183,7 +184,7 @@ public class ExpressionsTest
            ),
            "timestamp_floor(\"t\",'P1D','','America/Los_Angeles')"
        ),
-        new DateTime("2000-02-02T08:00:00").getMillis()
+        DateTimes.of("2000-02-02T08:00:00").getMillis()
     );
   }
@@ -205,7 +206,7 @@ public class ExpressionsTest
            ),
            "timestamp_floor(\"t\",'P1Y','','UTC')"
        ),
-        new DateTime("2000").getMillis()
+        DateTimes.of("2000").getMillis()
     );
   }
@@ -221,7 +222,7 @@ public class ExpressionsTest
            rexBuilder.makeFlag(TimeUnitRange.YEAR)
        ),
        DruidExpression.fromExpression("timestamp_ceil(\"t\",'P1Y','','UTC')"),
-        new DateTime("2001").getMillis()
+        DateTimes.of("2001").getMillis()
     );
   }
@@ -236,7 +237,7 @@ public class ExpressionsTest
            rexBuilder.makeLiteral(-3, typeFactory.createSqlType(SqlTypeName.INTEGER), true)
        ),
        DruidExpression.fromExpression("timestamp_shift(\"t\",'PT2H',-3)"),
-        new DateTime("2000-02-02T22:05:06").getMillis()
+        DateTimes.of("2000-02-02T22:05:06").getMillis()
     );
   }
@@ -292,7 +293,7 @@ public class ExpressionsTest
            null,
            "(\"t\" + 90060000)"
        ),
-        new DateTime("2000-02-03T04:05:06").plus(period).getMillis()
+        DateTimes.of("2000-02-03T04:05:06").plus(period).getMillis()
     );
   }
@@ -314,7 +315,7 @@ public class ExpressionsTest
            null,
            "timestamp_shift(\"t\",concat('P', 13, 'M'),1)"
        ),
-        new DateTime("2000-02-03T04:05:06").plus(period).getMillis()
+        DateTimes.of("2000-02-03T04:05:06").plus(period).getMillis()
     );
   }
@@ -339,7 +340,7 @@ public class ExpressionsTest
            null,
            "(\"t\" - 90060000)"
        ),
-        new DateTime("2000-02-03T04:05:06").minus(period).getMillis()
+        DateTimes.of("2000-02-03T04:05:06").minus(period).getMillis()
     );
   }
@@ -364,7 +365,7 @@ public class ExpressionsTest
            null,
            "timestamp_shift(\"t\",concat('P', 13, 'M'),-1)"
        ),
-        new DateTime("2000-02-03T04:05:06").minus(period).getMillis()
+        DateTimes.of("2000-02-03T04:05:06").minus(period).getMillis()
     );
   }
@@ -378,7 +379,7 @@ public class ExpressionsTest
            rexBuilder.makeLiteral("yyyy-MM-dd HH:mm:ss")
        ),
        DruidExpression.fromExpression("timestamp_parse(\"tstr\",'yyyy-MM-dd HH:mm:ss')"),
-        new DateTime("2000-02-03T04:05:06").getMillis()
+        DateTimes.of("2000-02-03T04:05:06").getMillis()
     );
     testExpression(
@@ -389,7 +390,7 @@ public class ExpressionsTest
            rexBuilder.makeLiteral("America/Los_Angeles")
        ),
        DruidExpression.fromExpression("timestamp_parse(\"tstr\",'yyyy-MM-dd HH:mm:ss','America/Los_Angeles')"),
-        new DateTime("2000-02-03T04:05:06-08:00").getMillis()
+        DateTimes.of("2000-02-03T04:05:06-08:00").getMillis()
     );
   }
@@ -481,7 +482,7 @@ public class ExpressionsTest
            SimpleExtraction.of("t", null),
            "\"t\""
        ),
-        new DateTime("2000-02-03T04:05:06Z").getMillis()
+        DateTimes.of("2000-02-03T04:05:06Z").getMillis()
     );
     testExpression(
@@ -493,7 +494,7 @@ public class ExpressionsTest
            null,
            "timestamp_parse(\"tstr\",'yyyy-MM-dd HH:mm:ss')"
        ),
-        new DateTime("2000-02-03T04:05:06Z").getMillis()
+        DateTimes.of("2000-02-03T04:05:06Z").getMillis()
     );
   }
@@ -526,7 +527,7 @@ public class ExpressionsTest
            SimpleExtraction.of("t", null),
            "\"t\""
        ),
-        new DateTime("2000-02-03T04:05:06").getMillis()
+        DateTimes.of("2000-02-03T04:05:06").getMillis()
     );
   }
@@ -542,7 +543,7 @@ public class ExpressionsTest
            SimpleExtraction.of("t", new TimeFormatExtractionFn(null, null, null, Granularities.DAY, true)),
            "timestamp_floor(\"t\",'P1D','','UTC')"
        ),
-        new DateTime("2000-02-03").getMillis()
+        DateTimes.of("2000-02-03").getMillis()
     );
     testExpression(
@@ -553,7 +554,7 @@ public class ExpressionsTest
        DruidExpression.fromExpression(
            "timestamp_floor(timestamp_parse(\"dstr\",'yyyy-MM-dd'),'P1D','','UTC')"
        ),
-        new DateTime("2000-02-03").getMillis()
+        DateTimes.of("2000-02-03").getMillis()
     );
   }
@@ -586,7 +587,7 @@ public class ExpressionsTest
            SimpleExtraction.of("t", new TimeFormatExtractionFn(null, null, null, Granularities.DAY, true)),
            "timestamp_floor(\"t\",'P1D','','UTC')"
        ),
-        new DateTime("2000-02-03").getMillis()
+        DateTimes.of("2000-02-03").getMillis()
     );
   }
diff --git a/sql/src/test/java/io/druid/sql/calcite/filtration/FiltrationTest.java b/sql/src/test/java/io/druid/sql/calcite/filtration/FiltrationTest.java
index e38d12c399c..059d62fb259 100644
--- a/sql/src/test/java/io/druid/sql/calcite/filtration/FiltrationTest.java
+++ b/sql/src/test/java/io/druid/sql/calcite/filtration/FiltrationTest.java
@@ -20,10 +20,10 @@ package io.druid.sql.calcite.filtration;
 import com.google.common.collect.ImmutableList;
+import io.druid.java.util.common.Intervals;
 import io.druid.query.filter.IntervalDimFilter;
 import io.druid.query.filter.NotDimFilter;
 import io.druid.segment.column.Column;
-import org.joda.time.Interval;
 import org.junit.Assert;
 import org.junit.Test;
@@ -36,7 +36,7 @@ public class FiltrationTest
         new NotDimFilter(
             new IntervalDimFilter(
                 Column.TIME_COLUMN_NAME,
-                ImmutableList.of(new Interval("2000/2001"), new Interval("2002/2003")),
+                ImmutableList.of(Intervals.of("2000/2001"), Intervals.of("2002/2003")),
                 null
             )
         ),
@@ -52,7 +52,7 @@ public class FiltrationTest
         new NotDimFilter(
             new IntervalDimFilter(
                 Column.TIME_COLUMN_NAME,
-                ImmutableList.of(new Interval("2000/2001"), new Interval("2002/2003")),
+                ImmutableList.of(Intervals.of("2000/2001"), Intervals.of("2002/2003")),
                 null
             )
         ),
diff --git a/sql/src/test/java/io/druid/sql/calcite/schema/DruidSchemaTest.java b/sql/src/test/java/io/druid/sql/calcite/schema/DruidSchemaTest.java
index 49497ac5fa4..119342966bd 100644
--- a/sql/src/test/java/io/druid/sql/calcite/schema/DruidSchemaTest.java
+++ b/sql/src/test/java/io/druid/sql/calcite/schema/DruidSchemaTest.java
@@ -23,6 +23,7 @@ import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.ImmutableSet;
 import io.druid.data.input.InputRow;
+import io.druid.java.util.common.Intervals;
 import io.druid.query.aggregation.CountAggregatorFactory;
 import io.druid.query.aggregation.DoubleSumAggregatorFactory;
 import io.druid.query.aggregation.LongSumAggregatorFactory;
@@ -45,7 +46,6 @@ import org.apache.calcite.rel.type.RelDataType;
 import org.apache.calcite.rel.type.RelDataTypeField;
 import org.apache.calcite.schema.Table;
 import org.apache.calcite.sql.type.SqlTypeName;
-import org.joda.time.Interval;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
@@ -116,7 +116,7 @@ public class DruidSchemaTest
     walker = new SpecificSegmentsQuerySegmentWalker(CalciteTests.queryRunnerFactoryConglomerate()).add(
         DataSegment.builder()
                    .dataSource(CalciteTests.DATASOURCE1)
-                   .interval(new Interval("2000/P1Y"))
+                   .interval(Intervals.of("2000/P1Y"))
                    .version("1")
                    .shardSpec(new LinearShardSpec(0))
                    .build(),
@@ -124,7 +124,7 @@ public class DruidSchemaTest
     ).add(
         DataSegment.builder()
                    .dataSource(CalciteTests.DATASOURCE1)
-                   .interval(new Interval("2001/P1Y"))
+                   .interval(Intervals.of("2001/P1Y"))
                    .version("1")
                    .shardSpec(new LinearShardSpec(0))
                    .build(),
diff --git a/sql/src/test/java/io/druid/sql/calcite/util/CalciteTests.java b/sql/src/test/java/io/druid/sql/calcite/util/CalciteTests.java
index fc3f30e0c0a..72bee58bdaa 100644
--- a/sql/src/test/java/io/druid/sql/calcite/util/CalciteTests.java
+++ b/sql/src/test/java/io/druid/sql/calcite/util/CalciteTests.java
@@ -99,6 +99,7 @@ import io.druid.sql.guice.SqlModule;
 import io.druid.timeline.DataSegment;
 import io.druid.timeline.partition.LinearShardSpec;
 import org.joda.time.DateTime;
+import org.joda.time.chrono.ISOChronology;
 import java.io.File;
 import java.nio.ByteBuffer;
@@ -423,7 +424,7 @@ public class CalciteTests
   {
     return PARSER.parse(
         ImmutableMap.of(
-            "t", new DateTime(t).getMillis(),
+            "t", new DateTime(t, ISOChronology.getInstanceUTC()).getMillis(),
             "dim1", dim1,
             "dim2", dim2,
             "m1", m1